graphics.c 78 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718271927202721272227232724272527262727272827292730273127322733273427352736273727382739274027412742274327442745274627472748274927502751275227532754275527562757275827592760276127622763276427652766276727682769277027712772277327742775277627772778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943
294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312
  1. /******************************************************************************
  2. Copyright (C) 2023 by Lain Bailey <[email protected]>
  3. This program is free software: you can redistribute it and/or modify
  4. it under the terms of the GNU General Public License as published by
  5. the Free Software Foundation, either version 2 of the License, or
  6. (at your option) any later version.
  7. This program is distributed in the hope that it will be useful,
  8. but WITHOUT ANY WARRANTY; without even the implied warranty of
  9. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  10. GNU General Public License for more details.
  11. You should have received a copy of the GNU General Public License
  12. along with this program. If not, see <http://www.gnu.org/licenses/>.
  13. ******************************************************************************/
  14. #include <assert.h>
  15. #include "../util/base.h"
  16. #include "../util/bmem.h"
  17. #include "../util/platform.h"
  18. #include "graphics-internal.h"
  19. #include "vec2.h"
  20. #include "vec3.h"
  21. #include "quat.h"
  22. #include "axisang.h"
  23. #include "effect-parser.h"
  24. #include "effect.h"
  25. #ifdef near
  26. #undef near
  27. #endif
  28. #ifdef far
  29. #undef far
  30. #endif
  31. static THREAD_LOCAL graphics_t *thread_graphics = NULL;
  32. static inline bool gs_obj_valid(const void *obj, const char *f, const char *name)
  33. {
  34. if (!obj) {
  35. blog(LOG_DEBUG, "%s: Null '%s' parameter", f, name);
  36. return false;
  37. }
  38. return true;
  39. }
  40. static inline bool gs_valid(const char *f)
  41. {
  42. if (!thread_graphics) {
  43. blog(LOG_DEBUG, "%s: called while not in a graphics context", f);
  44. return false;
  45. }
  46. return true;
  47. }
  48. #define ptr_valid(ptr, func) gs_obj_valid(ptr, func, #ptr)
  49. #define gs_valid_p(func, param1) (gs_valid(func) && ptr_valid(param1, func))
  50. #define gs_valid_p2(func, param1, param2) (gs_valid(func) && ptr_valid(param1, func) && ptr_valid(param2, func))
  51. #define gs_valid_p3(func, param1, param2, param3) \
  52. (gs_valid(func) && ptr_valid(param1, func) && ptr_valid(param2, func) && ptr_valid(param3, func))
  53. #define IMMEDIATE_COUNT 512
  54. void gs_enum_adapters(bool (*callback)(void *param, const char *name, uint32_t id), void *param)
  55. {
  56. graphics_t *graphics = thread_graphics;
  57. if (!gs_valid_p("gs_enum_adapters", callback))
  58. return;
  59. if (graphics->exports.device_enum_adapters) {
  60. if (graphics->exports.device_enum_adapters(graphics->device, callback, param)) {
  61. return;
  62. }
  63. }
  64. /* If the subsystem does not currently support device enumeration of
  65. * adapters or fails to enumerate adapters, just set it to one adapter
  66. * named "Default" */
  67. callback(param, "Default", 0);
  68. }
  69. extern void gs_init_image_deps(void);
  70. extern void gs_free_image_deps(void);
  71. bool load_graphics_imports(struct gs_exports *exports, void *module, const char *module_name);
  72. static bool graphics_init_immediate_vb(struct graphics_subsystem *graphics)
  73. {
  74. struct gs_vb_data *vbd;
  75. vbd = gs_vbdata_create();
  76. vbd->num = IMMEDIATE_COUNT;
  77. vbd->points = bmalloc(sizeof(struct vec3) * IMMEDIATE_COUNT);
  78. vbd->normals = bmalloc(sizeof(struct vec3) * IMMEDIATE_COUNT);
  79. vbd->colors = bmalloc(sizeof(uint32_t) * IMMEDIATE_COUNT);
  80. vbd->num_tex = 1;
  81. vbd->tvarray = bmalloc(sizeof(struct gs_tvertarray));
  82. vbd->tvarray[0].width = 2;
  83. vbd->tvarray[0].array = bmalloc(sizeof(struct vec2) * IMMEDIATE_COUNT);
  84. graphics->immediate_vertbuffer =
  85. graphics->exports.device_vertexbuffer_create(graphics->device, vbd, GS_DYNAMIC);
  86. if (!graphics->immediate_vertbuffer)
  87. return false;
  88. return true;
  89. }
  90. static bool graphics_init_sprite_vbs(struct graphics_subsystem *graphics)
  91. {
  92. struct gs_vb_data *vbd;
  93. vbd = gs_vbdata_create();
  94. vbd->num = 4;
  95. vbd->points = bzalloc(sizeof(struct vec3) * 4);
  96. vbd->num_tex = 1;
  97. vbd->tvarray = bzalloc(sizeof(struct gs_tvertarray));
  98. vbd->tvarray[0].width = 2;
  99. vbd->tvarray[0].array = bzalloc(sizeof(struct vec2) * 4);
  100. vbd->points[1].x = 1.0f;
  101. vbd->points[2].y = 1.0f;
  102. vbd->points[3].x = 1.0f;
  103. vbd->points[3].y = 1.0f;
  104. struct vec2 *uvs = vbd->tvarray[0].array;
  105. uvs[1].x = 1.0f;
  106. uvs[2].y = 1.0f;
  107. uvs[3].x = 1.0f;
  108. uvs[3].y = 1.0f;
  109. graphics->sprite_buffer = gs_vertexbuffer_create(vbd, GS_DUP_BUFFER);
  110. if (!graphics->sprite_buffer)
  111. return false;
  112. graphics->subregion_buffer = gs_vertexbuffer_create(vbd, GS_DUP_BUFFER | GS_DYNAMIC);
  113. if (!graphics->subregion_buffer)
  114. return false;
  115. uvs[0].y = 1.0f;
  116. uvs[1].y = 1.0f;
  117. uvs[2].y = 0.0f;
  118. uvs[3].y = 0.0f;
  119. graphics->flipped_sprite_buffer = gs_vertexbuffer_create(vbd, 0);
  120. if (!graphics->flipped_sprite_buffer)
  121. return false;
  122. return true;
  123. }
  124. static bool graphics_init(struct graphics_subsystem *graphics)
  125. {
  126. struct matrix4 top_mat;
  127. matrix4_identity(&top_mat);
  128. da_push_back(graphics->matrix_stack, &top_mat);
  129. graphics->exports.device_enter_context(graphics->device);
  130. thread_graphics = graphics;
  131. if (!graphics_init_immediate_vb(graphics))
  132. return false;
  133. if (!graphics_init_sprite_vbs(graphics))
  134. return false;
  135. if (pthread_mutex_init(&graphics->mutex, NULL) != 0)
  136. return false;
  137. if (pthread_mutex_init(&graphics->effect_mutex, NULL) != 0)
  138. return false;
  139. graphics->exports.device_blend_function_separate(graphics->device, GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA,
  140. GS_BLEND_ONE, GS_BLEND_INVSRCALPHA);
  141. graphics->cur_blend_state.enabled = true;
  142. graphics->cur_blend_state.src_c = GS_BLEND_SRCALPHA;
  143. graphics->cur_blend_state.dest_c = GS_BLEND_INVSRCALPHA;
  144. graphics->cur_blend_state.src_a = GS_BLEND_ONE;
  145. graphics->cur_blend_state.dest_a = GS_BLEND_INVSRCALPHA;
  146. graphics->cur_blend_state.op = GS_BLEND_OP_ADD;
  147. graphics->exports.device_blend_op(graphics->device, graphics->cur_blend_state.op);
  148. graphics->exports.device_leave_context(graphics->device);
  149. gs_init_image_deps();
  150. thread_graphics = NULL;
  151. return true;
  152. }
  153. int gs_create(graphics_t **pgraphics, const char *module, uint32_t adapter)
  154. {
  155. int errcode = GS_ERROR_FAIL;
  156. graphics_t *graphics = bzalloc(sizeof(struct graphics_subsystem));
  157. pthread_mutex_init_value(&graphics->mutex);
  158. pthread_mutex_init_value(&graphics->effect_mutex);
  159. graphics->module = os_dlopen(module);
  160. if (!graphics->module) {
  161. errcode = GS_ERROR_MODULE_NOT_FOUND;
  162. goto error;
  163. }
  164. if (!load_graphics_imports(&graphics->exports, graphics->module, module))
  165. goto error;
  166. errcode = graphics->exports.device_create(&graphics->device, adapter);
  167. if (errcode != GS_SUCCESS)
  168. goto error;
  169. if (!graphics_init(graphics)) {
  170. errcode = GS_ERROR_FAIL;
  171. goto error;
  172. }
  173. *pgraphics = graphics;
  174. return errcode;
  175. error:
  176. gs_destroy(graphics);
  177. return errcode;
  178. }
  179. extern void gs_effect_actually_destroy(gs_effect_t *effect);
  180. void gs_destroy(graphics_t *graphics)
  181. {
  182. if (!ptr_valid(graphics, "gs_destroy"))
  183. return;
  184. while (thread_graphics)
  185. gs_leave_context();
  186. if (graphics->device) {
  187. struct gs_effect *effect = graphics->first_effect;
  188. thread_graphics = graphics;
  189. graphics->exports.device_enter_context(graphics->device);
  190. while (effect) {
  191. struct gs_effect *next = effect->next;
  192. gs_effect_actually_destroy(effect);
  193. effect = next;
  194. }
  195. graphics->exports.gs_vertexbuffer_destroy(graphics->subregion_buffer);
  196. graphics->exports.gs_vertexbuffer_destroy(graphics->flipped_sprite_buffer);
  197. graphics->exports.gs_vertexbuffer_destroy(graphics->sprite_buffer);
  198. graphics->exports.gs_vertexbuffer_destroy(graphics->immediate_vertbuffer);
  199. graphics->exports.device_destroy(graphics->device);
  200. thread_graphics = NULL;
  201. }
  202. pthread_mutex_destroy(&graphics->mutex);
  203. pthread_mutex_destroy(&graphics->effect_mutex);
  204. da_free(graphics->matrix_stack);
  205. da_free(graphics->viewport_stack);
  206. da_free(graphics->blend_state_stack);
  207. if (graphics->module)
  208. os_dlclose(graphics->module);
  209. bfree(graphics);
  210. gs_free_image_deps();
  211. }
  212. void gs_enter_context(graphics_t *graphics)
  213. {
  214. if (!ptr_valid(graphics, "gs_enter_context"))
  215. return;
  216. bool is_current = thread_graphics == graphics;
  217. if (thread_graphics && !is_current) {
  218. while (thread_graphics)
  219. gs_leave_context();
  220. }
  221. if (!is_current) {
  222. pthread_mutex_lock(&graphics->mutex);
  223. graphics->exports.device_enter_context(graphics->device);
  224. thread_graphics = graphics;
  225. }
  226. os_atomic_inc_long(&graphics->ref);
  227. }
  228. void gs_leave_context(void)
  229. {
  230. if (gs_valid("gs_leave_context")) {
  231. if (!os_atomic_dec_long(&thread_graphics->ref)) {
  232. graphics_t *graphics = thread_graphics;
  233. graphics->exports.device_leave_context(graphics->device);
  234. pthread_mutex_unlock(&graphics->mutex);
  235. thread_graphics = NULL;
  236. }
  237. }
  238. }
  239. graphics_t *gs_get_context(void)
  240. {
  241. return thread_graphics;
  242. }
  243. void *gs_get_device_obj(void)
  244. {
  245. if (!gs_valid("gs_get_device_obj"))
  246. return NULL;
  247. return thread_graphics->exports.device_get_device_obj(thread_graphics->device);
  248. }
  249. const char *gs_get_device_name(void)
  250. {
  251. return gs_valid("gs_get_device_name") ? thread_graphics->exports.device_get_name() : NULL;
  252. }
  253. const char *gs_get_driver_version(void)
  254. {
  255. graphics_t *graphics = thread_graphics;
  256. if (!gs_valid("gs_get_driver_version"))
  257. return NULL;
  258. if (graphics->exports.gpu_get_driver_version)
  259. return (graphics->exports.gpu_get_driver_version());
  260. else
  261. return NULL;
  262. }
  263. const char *gs_get_renderer(void)
  264. {
  265. graphics_t *graphics = thread_graphics;
  266. if (!gs_valid("gs_get_renderer"))
  267. return NULL;
  268. if (graphics->exports.gpu_get_renderer)
  269. return (graphics->exports.gpu_get_renderer());
  270. else
  271. return NULL;
  272. }
  273. uint64_t gs_get_gpu_dmem(void)
  274. {
  275. graphics_t *graphics = thread_graphics;
  276. if (!gs_valid("gs_get_gpu_dmem"))
  277. return 0;
  278. if (graphics->exports.gpu_get_dmem)
  279. return (graphics->exports.gpu_get_dmem());
  280. else
  281. return 0;
  282. }
  283. uint64_t gs_get_gpu_smem(void)
  284. {
  285. graphics_t *graphics = thread_graphics;
  286. if (!gs_valid("gs_get_gpu_smem"))
  287. return 0;
  288. if (graphics->exports.gpu_get_smem)
  289. return (graphics->exports.gpu_get_smem());
  290. else
  291. return 0;
  292. }
  293. int gs_get_device_type(void)
  294. {
  295. return gs_valid("gs_get_device_type") ? thread_graphics->exports.device_get_type() : -1;
  296. }
  297. static inline struct matrix4 *top_matrix(graphics_t *graphics)
  298. {
  299. return graphics->matrix_stack.array + graphics->cur_matrix;
  300. }
  301. void gs_matrix_push(void)
  302. {
  303. graphics_t *graphics = thread_graphics;
  304. if (!gs_valid("gs_matrix_push"))
  305. return;
  306. struct matrix4 mat, *top_mat = top_matrix(graphics);
  307. memcpy(&mat, top_mat, sizeof(struct matrix4));
  308. da_push_back(graphics->matrix_stack, &mat);
  309. graphics->cur_matrix++;
  310. }
  311. void gs_matrix_pop(void)
  312. {
  313. graphics_t *graphics = thread_graphics;
  314. if (!gs_valid("gs_matrix_pop"))
  315. return;
  316. if (graphics->cur_matrix == 0) {
  317. blog(LOG_ERROR, "Tried to pop last matrix on stack");
  318. return;
  319. }
  320. da_erase(graphics->matrix_stack, graphics->cur_matrix);
  321. graphics->cur_matrix--;
  322. }
  323. void gs_matrix_identity(void)
  324. {
  325. struct matrix4 *top_mat;
  326. if (!gs_valid("gs_matrix_identity"))
  327. return;
  328. top_mat = top_matrix(thread_graphics);
  329. if (top_mat)
  330. matrix4_identity(top_mat);
  331. }
  332. void gs_matrix_transpose(void)
  333. {
  334. struct matrix4 *top_mat;
  335. if (!gs_valid("gs_matrix_transpose"))
  336. return;
  337. top_mat = top_matrix(thread_graphics);
  338. if (top_mat)
  339. matrix4_transpose(top_mat, top_mat);
  340. }
  341. void gs_matrix_set(const struct matrix4 *matrix)
  342. {
  343. struct matrix4 *top_mat;
  344. if (!gs_valid("gs_matrix_set"))
  345. return;
  346. top_mat = top_matrix(thread_graphics);
  347. if (top_mat)
  348. matrix4_copy(top_mat, matrix);
  349. }
  350. void gs_matrix_get(struct matrix4 *dst)
  351. {
  352. struct matrix4 *top_mat;
  353. if (!gs_valid("gs_matrix_get"))
  354. return;
  355. top_mat = top_matrix(thread_graphics);
  356. if (top_mat)
  357. matrix4_copy(dst, top_mat);
  358. }
  359. void gs_matrix_mul(const struct matrix4 *matrix)
  360. {
  361. struct matrix4 *top_mat;
  362. if (!gs_valid("gs_matrix_mul"))
  363. return;
  364. top_mat = top_matrix(thread_graphics);
  365. if (top_mat)
  366. matrix4_mul(top_mat, matrix, top_mat);
  367. }
  368. void gs_matrix_rotquat(const struct quat *rot)
  369. {
  370. struct matrix4 *top_mat;
  371. if (!gs_valid("gs_matrix_rotquat"))
  372. return;
  373. top_mat = top_matrix(thread_graphics);
  374. if (top_mat)
  375. matrix4_rotate_i(top_mat, rot, top_mat);
  376. }
  377. void gs_matrix_rotaa(const struct axisang *rot)
  378. {
  379. struct matrix4 *top_mat;
  380. if (!gs_valid("gs_matrix_rotaa"))
  381. return;
  382. top_mat = top_matrix(thread_graphics);
  383. if (top_mat)
  384. matrix4_rotate_aa_i(top_mat, rot, top_mat);
  385. }
  386. void gs_matrix_translate(const struct vec3 *pos)
  387. {
  388. struct matrix4 *top_mat;
  389. if (!gs_valid("gs_matrix_translate"))
  390. return;
  391. top_mat = top_matrix(thread_graphics);
  392. if (top_mat)
  393. matrix4_translate3v_i(top_mat, pos, top_mat);
  394. }
  395. void gs_matrix_scale(const struct vec3 *scale)
  396. {
  397. struct matrix4 *top_mat;
  398. if (!gs_valid("gs_matrix_scale"))
  399. return;
  400. top_mat = top_matrix(thread_graphics);
  401. if (top_mat)
  402. matrix4_scale_i(top_mat, scale, top_mat);
  403. }
  404. void gs_matrix_rotaa4f(float x, float y, float z, float angle)
  405. {
  406. struct matrix4 *top_mat;
  407. struct axisang aa;
  408. if (!gs_valid("gs_matrix_rotaa4f"))
  409. return;
  410. top_mat = top_matrix(thread_graphics);
  411. if (top_mat) {
  412. axisang_set(&aa, x, y, z, angle);
  413. matrix4_rotate_aa_i(top_mat, &aa, top_mat);
  414. }
  415. }
  416. void gs_matrix_translate3f(float x, float y, float z)
  417. {
  418. struct matrix4 *top_mat;
  419. struct vec3 p;
  420. if (!gs_valid("gs_matrix_translate3f"))
  421. return;
  422. top_mat = top_matrix(thread_graphics);
  423. if (top_mat) {
  424. vec3_set(&p, x, y, z);
  425. matrix4_translate3v_i(top_mat, &p, top_mat);
  426. }
  427. }
  428. void gs_matrix_scale3f(float x, float y, float z)
  429. {
  430. struct matrix4 *top_mat = top_matrix(thread_graphics);
  431. struct vec3 p;
  432. if (top_mat) {
  433. vec3_set(&p, x, y, z);
  434. matrix4_scale_i(top_mat, &p, top_mat);
  435. }
  436. }
  437. static inline void reset_immediate_arrays(graphics_t *graphics)
  438. {
  439. da_init(graphics->verts);
  440. da_init(graphics->norms);
  441. da_init(graphics->colors);
  442. for (size_t i = 0; i < 16; i++)
  443. da_init(graphics->texverts[i]);
  444. }
  445. void gs_render_start(bool b_new)
  446. {
  447. graphics_t *graphics = thread_graphics;
  448. if (!gs_valid("gs_render_start"))
  449. return;
  450. graphics->using_immediate = !b_new;
  451. reset_immediate_arrays(graphics);
  452. if (b_new) {
  453. graphics->vbd = gs_vbdata_create();
  454. } else {
  455. graphics->vbd = gs_vertexbuffer_get_data(graphics->immediate_vertbuffer);
  456. memset(graphics->vbd->colors, 0xFF, sizeof(uint32_t) * IMMEDIATE_COUNT);
  457. graphics->verts.array = graphics->vbd->points;
  458. graphics->norms.array = graphics->vbd->normals;
  459. graphics->colors.array = graphics->vbd->colors;
  460. graphics->texverts[0].array = graphics->vbd->tvarray[0].array;
  461. graphics->verts.capacity = IMMEDIATE_COUNT;
  462. graphics->norms.capacity = IMMEDIATE_COUNT;
  463. graphics->colors.capacity = IMMEDIATE_COUNT;
  464. graphics->texverts[0].capacity = IMMEDIATE_COUNT;
  465. }
  466. }
  467. static inline size_t min_size(const size_t a, const size_t b)
  468. {
  469. return (a < b) ? a : b;
  470. }
  471. void gs_render_stop(enum gs_draw_mode mode)
  472. {
  473. graphics_t *graphics = thread_graphics;
  474. size_t i, num;
  475. if (!gs_valid("gs_render_stop"))
  476. return;
  477. num = graphics->verts.num;
  478. if (!num) {
  479. if (!graphics->using_immediate) {
  480. da_free(graphics->verts);
  481. da_free(graphics->norms);
  482. da_free(graphics->colors);
  483. for (i = 0; i < 16; i++)
  484. da_free(graphics->texverts[i]);
  485. gs_vbdata_destroy(graphics->vbd);
  486. }
  487. return;
  488. }
  489. if (graphics->norms.num && (graphics->norms.num != graphics->verts.num)) {
  490. blog(LOG_ERROR, "gs_render_stop: normal count does "
  491. "not match vertex count");
  492. num = min_size(num, graphics->norms.num);
  493. }
  494. if (graphics->colors.num && (graphics->colors.num != graphics->verts.num)) {
  495. blog(LOG_ERROR, "gs_render_stop: color count does "
  496. "not match vertex count");
  497. num = min_size(num, graphics->colors.num);
  498. }
  499. if (graphics->texverts[0].num && (graphics->texverts[0].num != graphics->verts.num)) {
  500. blog(LOG_ERROR, "gs_render_stop: texture vertex count does "
  501. "not match vertex count");
  502. num = min_size(num, graphics->texverts[0].num);
  503. }
  504. if (graphics->using_immediate) {
  505. gs_vertexbuffer_flush(graphics->immediate_vertbuffer);
  506. gs_load_vertexbuffer(graphics->immediate_vertbuffer);
  507. gs_load_indexbuffer(NULL);
  508. gs_draw(mode, 0, (uint32_t)num);
  509. reset_immediate_arrays(graphics);
  510. } else {
  511. gs_vertbuffer_t *vb = gs_render_save();
  512. gs_load_vertexbuffer(vb);
  513. gs_load_indexbuffer(NULL);
  514. gs_draw(mode, 0, 0);
  515. gs_vertexbuffer_destroy(vb);
  516. }
  517. graphics->vbd = NULL;
  518. }
  519. gs_vertbuffer_t *gs_render_save(void)
  520. {
  521. graphics_t *graphics = thread_graphics;
  522. size_t num_tex, i;
  523. if (!gs_valid("gs_render_save"))
  524. return NULL;
  525. if (graphics->using_immediate)
  526. return NULL;
  527. if (!graphics->verts.num) {
  528. gs_vbdata_destroy(graphics->vbd);
  529. return NULL;
  530. }
  531. for (num_tex = 0; num_tex < 16; num_tex++)
  532. if (!graphics->texverts[num_tex].num)
  533. break;
  534. graphics->vbd->points = graphics->verts.array;
  535. graphics->vbd->normals = graphics->norms.array;
  536. graphics->vbd->colors = graphics->colors.array;
  537. graphics->vbd->num = graphics->verts.num;
  538. graphics->vbd->num_tex = num_tex;
  539. if (graphics->vbd->num_tex) {
  540. graphics->vbd->tvarray = bmalloc(sizeof(struct gs_tvertarray) * num_tex);
  541. for (i = 0; i < num_tex; i++) {
  542. graphics->vbd->tvarray[i].width = 2;
  543. graphics->vbd->tvarray[i].array = graphics->texverts[i].array;
  544. }
  545. }
  546. reset_immediate_arrays(graphics);
  547. return gs_vertexbuffer_create(graphics->vbd, 0);
  548. }
  549. void gs_vertex2f(float x, float y)
  550. {
  551. struct vec3 v3;
  552. vec3_set(&v3, x, y, 0.0f);
  553. gs_vertex3v(&v3);
  554. }
  555. void gs_vertex3f(float x, float y, float z)
  556. {
  557. struct vec3 v3;
  558. vec3_set(&v3, x, y, z);
  559. gs_vertex3v(&v3);
  560. }
  561. void gs_normal3f(float x, float y, float z)
  562. {
  563. struct vec3 v3;
  564. vec3_set(&v3, x, y, z);
  565. gs_normal3v(&v3);
  566. }
  567. static inline bool validvertsize(graphics_t *graphics, size_t num, const char *name)
  568. {
  569. if (graphics->using_immediate && num == IMMEDIATE_COUNT) {
  570. blog(LOG_ERROR,
  571. "%s: tried to use over %u "
  572. "for immediate rendering",
  573. name, IMMEDIATE_COUNT);
  574. return false;
  575. }
  576. return true;
  577. }
  578. void gs_color(uint32_t color)
  579. {
  580. graphics_t *graphics = thread_graphics;
  581. if (!gs_valid("gs_color"))
  582. return;
  583. if (!validvertsize(graphics, graphics->colors.num, "gs_color"))
  584. return;
  585. da_push_back(graphics->colors, &color);
  586. }
  587. void gs_texcoord(float x, float y, int unit)
  588. {
  589. struct vec2 v2;
  590. vec2_set(&v2, x, y);
  591. gs_texcoord2v(&v2, unit);
  592. }
  593. void gs_vertex2v(const struct vec2 *v)
  594. {
  595. struct vec3 v3;
  596. vec3_set(&v3, v->x, v->y, 0.0f);
  597. gs_vertex3v(&v3);
  598. }
  599. void gs_vertex3v(const struct vec3 *v)
  600. {
  601. graphics_t *graphics = thread_graphics;
  602. if (!gs_valid("gs_vertex3v"))
  603. return;
  604. if (!validvertsize(graphics, graphics->verts.num, "gs_vertex"))
  605. return;
  606. da_push_back(graphics->verts, v);
  607. }
  608. void gs_normal3v(const struct vec3 *v)
  609. {
  610. graphics_t *graphics = thread_graphics;
  611. if (!gs_valid("gs_normal3v"))
  612. return;
  613. if (!validvertsize(graphics, graphics->norms.num, "gs_normal"))
  614. return;
  615. da_push_back(graphics->norms, v);
  616. }
  617. void gs_color4v(const struct vec4 *v)
  618. {
  619. /* TODO */
  620. UNUSED_PARAMETER(v);
  621. }
  622. void gs_texcoord2v(const struct vec2 *v, int unit)
  623. {
  624. graphics_t *graphics = thread_graphics;
  625. if (!gs_valid("gs_texcoord2v"))
  626. return;
  627. if (!validvertsize(graphics, graphics->texverts[unit].num, "gs_texcoord"))
  628. return;
  629. da_push_back(graphics->texverts[unit], v);
  630. }
  631. input_t *gs_get_input(void)
  632. {
  633. /* TODO */
  634. return NULL;
  635. }
  636. gs_effect_t *gs_get_effect(void)
  637. {
  638. if (!gs_valid("gs_get_effect"))
  639. return NULL;
  640. return thread_graphics ? thread_graphics->cur_effect : NULL;
  641. }
  642. static inline struct gs_effect *find_cached_effect(const char *filename)
  643. {
  644. struct gs_effect *effect = thread_graphics->first_effect;
  645. while (effect) {
  646. if (strcmp(effect->effect_path, filename) == 0)
  647. break;
  648. effect = effect->next;
  649. }
  650. return effect;
  651. }
  652. gs_effect_t *gs_effect_create_from_file(const char *file, char **error_string)
  653. {
  654. char *file_string;
  655. gs_effect_t *effect = NULL;
  656. if (!gs_valid_p("gs_effect_create_from_file", file))
  657. return NULL;
  658. effect = find_cached_effect(file);
  659. if (effect)
  660. return effect;
  661. file_string = os_quick_read_utf8_file(file);
  662. if (!file_string) {
  663. blog(LOG_ERROR, "Could not load effect file '%s'", file);
  664. return NULL;
  665. }
  666. effect = gs_effect_create(file_string, file, error_string);
  667. bfree(file_string);
  668. return effect;
  669. }
  670. gs_effect_t *gs_effect_create(const char *effect_string, const char *filename, char **error_string)
  671. {
  672. if (!gs_valid_p("gs_effect_create", effect_string))
  673. return NULL;
  674. struct gs_effect *effect = bzalloc(sizeof(struct gs_effect));
  675. struct effect_parser parser;
  676. bool success;
  677. effect->graphics = thread_graphics;
  678. effect->effect_path = bstrdup(filename);
  679. ep_init(&parser);
  680. success = ep_parse(&parser, effect, effect_string, filename);
  681. if (!success) {
  682. if (error_string)
  683. *error_string = error_data_buildstring(&parser.cfp.error_list);
  684. gs_effect_destroy(effect);
  685. effect = NULL;
  686. }
  687. if (effect) {
  688. pthread_mutex_lock(&thread_graphics->effect_mutex);
  689. if (effect->effect_path) {
  690. effect->cached = true;
  691. effect->next = thread_graphics->first_effect;
  692. thread_graphics->first_effect = effect;
  693. }
  694. pthread_mutex_unlock(&thread_graphics->effect_mutex);
  695. }
  696. ep_free(&parser);
  697. return effect;
  698. }
  699. gs_shader_t *gs_vertexshader_create_from_file(const char *file, char **error_string)
  700. {
  701. if (!gs_valid_p("gs_vertexshader_create_from_file", file))
  702. return NULL;
  703. char *file_string;
  704. gs_shader_t *shader = NULL;
  705. file_string = os_quick_read_utf8_file(file);
  706. if (!file_string) {
  707. blog(LOG_ERROR, "Could not load vertex shader file '%s'", file);
  708. return NULL;
  709. }
  710. shader = gs_vertexshader_create(file_string, file, error_string);
  711. bfree(file_string);
  712. return shader;
  713. }
  714. gs_shader_t *gs_pixelshader_create_from_file(const char *file, char **error_string)
  715. {
  716. char *file_string;
  717. gs_shader_t *shader = NULL;
  718. if (!gs_valid_p("gs_pixelshader_create_from_file", file))
  719. return NULL;
  720. file_string = os_quick_read_utf8_file(file);
  721. if (!file_string) {
  722. blog(LOG_ERROR, "Could not load pixel shader file '%s'", file);
  723. return NULL;
  724. }
  725. shader = gs_pixelshader_create(file_string, file, error_string);
  726. bfree(file_string);
  727. return shader;
  728. }
  729. gs_texture_t *gs_texture_create_from_file(const char *file)
  730. {
  731. enum gs_color_format format;
  732. uint32_t cx;
  733. uint32_t cy;
  734. uint8_t *data = gs_create_texture_file_data(file, &format, &cx, &cy);
  735. gs_texture_t *tex = NULL;
  736. if (data) {
  737. tex = gs_texture_create(cx, cy, format, 1, (const uint8_t **)&data, 0);
  738. bfree(data);
  739. }
  740. return tex;
  741. }
  742. static inline void assign_sprite_rect(float *start, float *end, float size, bool flip)
  743. {
  744. if (!flip) {
  745. *start = 0.0f;
  746. *end = size;
  747. } else {
  748. *start = size;
  749. *end = 0.0f;
  750. }
  751. }
  752. static inline void assign_sprite_uv(float *start, float *end, bool flip)
  753. {
  754. if (!flip) {
  755. *start = 0.0f;
  756. *end = 1.0f;
  757. } else {
  758. *start = 1.0f;
  759. *end = 0.0f;
  760. }
  761. }
  762. static void build_sprite(struct gs_vb_data *data, float fcx, float fcy, float start_u, float end_u, float start_v,
  763. float end_v)
  764. {
  765. struct vec2 *tvarray = data->tvarray[0].array;
  766. vec3_zero(data->points);
  767. vec3_set(data->points + 1, fcx, 0.0f, 0.0f);
  768. vec3_set(data->points + 2, 0.0f, fcy, 0.0f);
  769. vec3_set(data->points + 3, fcx, fcy, 0.0f);
  770. vec2_set(tvarray, start_u, start_v);
  771. vec2_set(tvarray + 1, end_u, start_v);
  772. vec2_set(tvarray + 2, start_u, end_v);
  773. vec2_set(tvarray + 3, end_u, end_v);
  774. }
  775. static inline void build_sprite_norm(struct gs_vb_data *data, float fcx, float fcy, uint32_t flip)
  776. {
  777. float start_u, end_u;
  778. float start_v, end_v;
  779. assign_sprite_uv(&start_u, &end_u, (flip & GS_FLIP_U) != 0);
  780. assign_sprite_uv(&start_v, &end_v, (flip & GS_FLIP_V) != 0);
  781. build_sprite(data, fcx, fcy, start_u, end_u, start_v, end_v);
  782. }
  783. static inline void build_subsprite_norm(struct gs_vb_data *data, float fsub_x, float fsub_y, float fsub_cx,
  784. float fsub_cy, float fcx, float fcy, uint32_t flip)
  785. {
  786. float start_u, end_u;
  787. float start_v, end_v;
  788. if ((flip & GS_FLIP_U) == 0) {
  789. start_u = fsub_x / fcx;
  790. end_u = (fsub_x + fsub_cx) / fcx;
  791. } else {
  792. start_u = (fsub_x + fsub_cx) / fcx;
  793. end_u = fsub_x / fcx;
  794. }
  795. if ((flip & GS_FLIP_V) == 0) {
  796. start_v = fsub_y / fcy;
  797. end_v = (fsub_y + fsub_cy) / fcy;
  798. } else {
  799. start_v = (fsub_y + fsub_cy) / fcy;
  800. end_v = fsub_y / fcy;
  801. }
  802. build_sprite(data, fsub_cx, fsub_cy, start_u, end_u, start_v, end_v);
  803. }
  804. static inline void build_sprite_rect(struct gs_vb_data *data, gs_texture_t *tex, float fcx, float fcy, uint32_t flip)
  805. {
  806. float start_u, end_u;
  807. float start_v, end_v;
  808. float width = (float)gs_texture_get_width(tex);
  809. float height = (float)gs_texture_get_height(tex);
  810. assign_sprite_rect(&start_u, &end_u, width, (flip & GS_FLIP_U) != 0);
  811. assign_sprite_rect(&start_v, &end_v, height, (flip & GS_FLIP_V) != 0);
  812. build_sprite(data, fcx, fcy, start_u, end_u, start_v, end_v);
  813. }
  814. void gs_draw_quadf(gs_texture_t *tex, uint32_t flip, float width, float height)
  815. {
  816. graphics_t *graphics = thread_graphics;
  817. float fcx, fcy;
  818. struct gs_vb_data *data;
  819. if (tex) {
  820. if (gs_get_texture_type(tex) != GS_TEXTURE_2D) {
  821. blog(LOG_ERROR, "A sprite must be a 2D texture");
  822. return;
  823. }
  824. } else {
  825. if (width == 0.0f || height == 0.0f) {
  826. blog(LOG_ERROR, "A sprite cannot be drawn without "
  827. "a width/height");
  828. return;
  829. }
  830. }
  831. fcx = width != 0.0f ? width : (float)gs_texture_get_width(tex);
  832. fcy = height != 0.0f ? height : (float)gs_texture_get_height(tex);
  833. gs_matrix_push();
  834. gs_matrix_scale3f(fcx, fcy, 1.0f);
  835. gs_load_indexbuffer(NULL);
  836. if (tex && gs_texture_is_rect(tex)) {
  837. data = gs_vertexbuffer_get_data(graphics->subregion_buffer);
  838. build_sprite_rect(data, tex, 1.0f, 1.0f, flip);
  839. gs_vertexbuffer_flush(graphics->subregion_buffer);
  840. gs_load_vertexbuffer(graphics->subregion_buffer);
  841. gs_draw(GS_TRISTRIP, 0, 0);
  842. } else {
  843. gs_load_vertexbuffer(flip ? graphics->flipped_sprite_buffer : graphics->sprite_buffer);
  844. gs_draw(GS_TRISTRIP, 0, 0);
  845. }
  846. gs_matrix_pop();
  847. }
  848. void gs_draw_sprite(gs_texture_t *tex, uint32_t flip, uint32_t width, uint32_t height)
  849. {
  850. gs_draw_quadf(tex, flip, (float)width, (float)height);
  851. }
  852. void gs_draw_sprite_subregion(gs_texture_t *tex, uint32_t flip, uint32_t sub_x, uint32_t sub_y, uint32_t sub_cx,
  853. uint32_t sub_cy)
  854. {
  855. graphics_t *graphics = thread_graphics;
  856. uint32_t cx, cy;
  857. float fcx, fcy;
  858. struct gs_vb_data *data;
  859. if (tex) {
  860. if (gs_get_texture_type(tex) != GS_TEXTURE_2D) {
  861. blog(LOG_ERROR, "A sprite must be a 2D texture");
  862. return;
  863. }
  864. }
  865. cx = gs_texture_get_width(tex);
  866. cy = gs_texture_get_height(tex);
  867. if (sub_x == 0 && sub_y == 0 && sub_cx == cx && sub_cy == cy) {
  868. gs_draw_sprite(tex, flip, 0, 0);
  869. return;
  870. }
  871. fcx = (float)cx;
  872. fcy = (float)cy;
  873. data = gs_vertexbuffer_get_data(graphics->subregion_buffer);
  874. build_subsprite_norm(data, (float)sub_x, (float)sub_y, (float)sub_cx, (float)sub_cy, fcx, fcy, flip);
  875. gs_vertexbuffer_flush(graphics->subregion_buffer);
  876. gs_load_vertexbuffer(graphics->subregion_buffer);
  877. gs_load_indexbuffer(NULL);
  878. gs_draw(GS_TRISTRIP, 0, 0);
  879. }
  880. void gs_draw_cube_backdrop(gs_texture_t *cubetex, const struct quat *rot, float left, float right, float top,
  881. float bottom, float znear)
  882. {
  883. /* TODO */
  884. UNUSED_PARAMETER(cubetex);
  885. UNUSED_PARAMETER(rot);
  886. UNUSED_PARAMETER(left);
  887. UNUSED_PARAMETER(right);
  888. UNUSED_PARAMETER(top);
  889. UNUSED_PARAMETER(bottom);
  890. UNUSED_PARAMETER(znear);
  891. }
  892. void gs_reset_viewport(void)
  893. {
  894. uint32_t cx, cy;
  895. if (!gs_valid("gs_reset_viewport"))
  896. return;
  897. gs_get_size(&cx, &cy);
  898. gs_set_viewport(0, 0, (int)cx, (int)cy);
  899. }
  900. void gs_set_2d_mode(void)
  901. {
  902. uint32_t cx, cy;
  903. if (!gs_valid("gs_set_2d_mode"))
  904. return;
  905. gs_get_size(&cx, &cy);
  906. gs_ortho(0.0f, (float)cx, 0.0f, (float)cy, -1.0, -1024.0f);
  907. }
  908. void gs_set_3d_mode(double fovy, double znear, double zvar)
  909. {
  910. /* TODO */
  911. UNUSED_PARAMETER(fovy);
  912. UNUSED_PARAMETER(znear);
  913. UNUSED_PARAMETER(zvar);
  914. }
  915. void gs_viewport_push(void)
  916. {
  917. if (!gs_valid("gs_viewport_push"))
  918. return;
  919. struct gs_rect *rect = da_push_back_new(thread_graphics->viewport_stack);
  920. gs_get_viewport(rect);
  921. }
  922. void gs_viewport_pop(void)
  923. {
  924. struct gs_rect *rect;
  925. if (!gs_valid("gs_viewport_pop"))
  926. return;
  927. if (!thread_graphics->viewport_stack.num)
  928. return;
  929. rect = da_end(thread_graphics->viewport_stack);
  930. gs_set_viewport(rect->x, rect->y, rect->cx, rect->cy);
  931. da_pop_back(thread_graphics->viewport_stack);
  932. }
  933. void gs_texture_set_image(gs_texture_t *tex, const uint8_t *data, uint32_t linesize, bool flip)
  934. {
  935. uint8_t *ptr;
  936. uint32_t linesize_out;
  937. size_t row_copy;
  938. size_t height;
  939. if (!gs_valid_p2("gs_texture_set_image", tex, data))
  940. return;
  941. if (!gs_texture_map(tex, &ptr, &linesize_out))
  942. return;
  943. row_copy = (linesize < linesize_out) ? linesize : linesize_out;
  944. height = gs_texture_get_height(tex);
  945. if (flip) {
  946. uint8_t *const end = ptr + height * linesize_out;
  947. data += (height - 1) * linesize;
  948. while (ptr < end) {
  949. memcpy(ptr, data, row_copy);
  950. ptr += linesize_out;
  951. data -= linesize;
  952. }
  953. } else if (linesize == linesize_out) {
  954. memcpy(ptr, data, row_copy * height);
  955. } else {
  956. uint8_t *const end = ptr + height * linesize_out;
  957. while (ptr < end) {
  958. memcpy(ptr, data, row_copy);
  959. ptr += linesize_out;
  960. data += linesize;
  961. }
  962. }
  963. gs_texture_unmap(tex);
  964. }
  965. void gs_cubetexture_set_image(gs_texture_t *cubetex, uint32_t side, const void *data, uint32_t linesize, bool invert)
  966. {
  967. /* TODO */
  968. UNUSED_PARAMETER(cubetex);
  969. UNUSED_PARAMETER(side);
  970. UNUSED_PARAMETER(data);
  971. UNUSED_PARAMETER(linesize);
  972. UNUSED_PARAMETER(invert);
  973. }
  974. void gs_perspective(float angle, float aspect, float near, float far)
  975. {
  976. graphics_t *graphics = thread_graphics;
  977. float xmin, xmax, ymin, ymax;
  978. if (!gs_valid("gs_perspective"))
  979. return;
  980. ymax = near * tanf(RAD(angle) * 0.5f);
  981. ymin = -ymax;
  982. xmin = ymin * aspect;
  983. xmax = ymax * aspect;
  984. graphics->exports.device_frustum(graphics->device, xmin, xmax, ymin, ymax, near, far);
  985. }
  986. void gs_blend_state_push(void)
  987. {
  988. graphics_t *graphics = thread_graphics;
  989. if (!gs_valid("gs_blend_state_push"))
  990. return;
  991. da_push_back(graphics->blend_state_stack, &graphics->cur_blend_state);
  992. }
  993. void gs_blend_state_pop(void)
  994. {
  995. graphics_t *graphics = thread_graphics;
  996. struct blend_state *state;
  997. if (!gs_valid("gs_blend_state_pop"))
  998. return;
  999. state = da_end(graphics->blend_state_stack);
  1000. if (!state)
  1001. return;
  1002. gs_enable_blending(state->enabled);
  1003. gs_blend_function_separate(state->src_c, state->dest_c, state->src_a, state->dest_a);
  1004. gs_blend_op(state->op);
  1005. da_pop_back(graphics->blend_state_stack);
  1006. }
  1007. void gs_reset_blend_state(void)
  1008. {
  1009. graphics_t *graphics = thread_graphics;
  1010. if (!gs_valid("gs_preprocessor_name"))
  1011. return;
  1012. if (!graphics->cur_blend_state.enabled)
  1013. gs_enable_blending(true);
  1014. if (graphics->cur_blend_state.src_c != GS_BLEND_SRCALPHA ||
  1015. graphics->cur_blend_state.dest_c != GS_BLEND_INVSRCALPHA ||
  1016. graphics->cur_blend_state.src_a != GS_BLEND_ONE ||
  1017. graphics->cur_blend_state.dest_a != GS_BLEND_INVSRCALPHA) {
  1018. gs_blend_function_separate(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA, GS_BLEND_ONE, GS_BLEND_INVSRCALPHA);
  1019. gs_blend_op(GS_BLEND_OP_ADD);
  1020. }
  1021. }
  1022. /* ------------------------------------------------------------------------- */
  1023. const char *gs_preprocessor_name(void)
  1024. {
  1025. graphics_t *graphics = thread_graphics;
  1026. if (!gs_valid("gs_preprocessor_name"))
  1027. return NULL;
  1028. return graphics->exports.device_preprocessor_name();
  1029. }
  1030. gs_swapchain_t *gs_swapchain_create(const struct gs_init_data *data)
  1031. {
  1032. struct gs_init_data new_data = *data;
  1033. graphics_t *graphics = thread_graphics;
  1034. if (!gs_valid_p("gs_swapchain_create", data))
  1035. return NULL;
  1036. if (new_data.num_backbuffers == 0)
  1037. new_data.num_backbuffers = 1;
  1038. return graphics->exports.device_swapchain_create(graphics->device, &new_data);
  1039. }
  1040. void gs_resize(uint32_t x, uint32_t y)
  1041. {
  1042. graphics_t *graphics = thread_graphics;
  1043. if (!gs_valid("gs_resize"))
  1044. return;
  1045. graphics->exports.device_resize(graphics->device, x, y);
  1046. }
  1047. void gs_update_color_space(void)
  1048. {
  1049. graphics_t *graphics = thread_graphics;
  1050. if (!gs_valid("gs_update_color_space"))
  1051. return;
  1052. graphics->exports.device_update_color_space(graphics->device);
  1053. }
  1054. void gs_get_size(uint32_t *x, uint32_t *y)
  1055. {
  1056. graphics_t *graphics = thread_graphics;
  1057. if (!gs_valid("gs_get_size"))
  1058. return;
  1059. graphics->exports.device_get_size(graphics->device, x, y);
  1060. }
  1061. uint32_t gs_get_width(void)
  1062. {
  1063. graphics_t *graphics = thread_graphics;
  1064. if (!gs_valid("gs_get_width"))
  1065. return 0;
  1066. return graphics->exports.device_get_width(graphics->device);
  1067. }
  1068. uint32_t gs_get_height(void)
  1069. {
  1070. graphics_t *graphics = thread_graphics;
  1071. if (!gs_valid("gs_get_height"))
  1072. return 0;
  1073. return graphics->exports.device_get_height(graphics->device);
  1074. }
  1075. static inline bool is_pow2(uint32_t size)
  1076. {
  1077. return size >= 2 && (size & (size - 1)) == 0;
  1078. }
  1079. gs_texture_t *gs_texture_create(uint32_t width, uint32_t height, enum gs_color_format color_format, uint32_t levels,
  1080. const uint8_t **data, uint32_t flags)
  1081. {
  1082. graphics_t *graphics = thread_graphics;
  1083. bool pow2tex = is_pow2(width) && is_pow2(height);
  1084. bool uses_mipmaps = (flags & GS_BUILD_MIPMAPS || levels != 1);
  1085. if (!gs_valid("gs_texture_create"))
  1086. return NULL;
  1087. if (uses_mipmaps && !pow2tex) {
  1088. blog(LOG_WARNING, "Cannot use mipmaps with a "
  1089. "non-power-of-two texture. Disabling "
  1090. "mipmaps for this texture.");
  1091. uses_mipmaps = false;
  1092. flags &= ~GS_BUILD_MIPMAPS;
  1093. levels = 1;
  1094. }
  1095. if (uses_mipmaps && flags & GS_RENDER_TARGET) {
  1096. blog(LOG_WARNING, "Cannot use mipmaps with render targets. "
  1097. "Disabling mipmaps for this texture.");
  1098. flags &= ~GS_BUILD_MIPMAPS;
  1099. levels = 1;
  1100. }
  1101. return graphics->exports.device_texture_create(graphics->device, width, height, color_format, levels, data,
  1102. flags);
  1103. }
  1104. #if defined(__linux__) || defined(__FreeBSD__) || defined(__DragonFly__)
  1105. gs_texture_t *gs_texture_create_from_dmabuf(unsigned int width, unsigned int height, uint32_t drm_format,
  1106. enum gs_color_format color_format, uint32_t n_planes, const int *fds,
  1107. const uint32_t *strides, const uint32_t *offsets, const uint64_t *modifiers)
  1108. {
  1109. graphics_t *graphics = thread_graphics;
  1110. return graphics->exports.device_texture_create_from_dmabuf(
  1111. graphics->device, width, height, drm_format, color_format, n_planes, fds, strides, offsets, modifiers);
  1112. }
  1113. bool gs_query_dmabuf_capabilities(enum gs_dmabuf_flags *dmabuf_flags, uint32_t **drm_formats, size_t *n_formats)
  1114. {
  1115. graphics_t *graphics = thread_graphics;
  1116. return graphics->exports.device_query_dmabuf_capabilities(graphics->device, dmabuf_flags, drm_formats,
  1117. n_formats);
  1118. }
  1119. bool gs_query_dmabuf_modifiers_for_format(uint32_t drm_format, uint64_t **modifiers, size_t *n_modifiers)
  1120. {
  1121. graphics_t *graphics = thread_graphics;
  1122. return graphics->exports.device_query_dmabuf_modifiers_for_format(graphics->device, drm_format, modifiers,
  1123. n_modifiers);
  1124. }
  1125. gs_texture_t *gs_texture_create_from_pixmap(uint32_t width, uint32_t height, enum gs_color_format color_format,
  1126. uint32_t target, void *pixmap)
  1127. {
  1128. graphics_t *graphics = thread_graphics;
  1129. return graphics->exports.device_texture_create_from_pixmap(graphics->device, width, height, color_format,
  1130. target, pixmap);
  1131. }
  1132. bool gs_query_sync_capabilities(void)
  1133. {
  1134. graphics_t *graphics = thread_graphics;
  1135. return graphics->exports.device_query_sync_capabilities(graphics->device);
  1136. }
  1137. gs_sync_t *gs_sync_create(void)
  1138. {
  1139. graphics_t *graphics = thread_graphics;
  1140. return graphics->exports.device_sync_create(graphics->device);
  1141. }
  1142. gs_sync_t *gs_sync_create_from_syncobj_timeline_point(int syncobj_fd, uint64_t timeline_point)
  1143. {
  1144. graphics_t *graphics = thread_graphics;
  1145. return graphics->exports.device_sync_create_from_syncobj_timeline_point(graphics->device, syncobj_fd,
  1146. timeline_point);
  1147. }
  1148. void gs_sync_destroy(gs_sync_t *sync)
  1149. {
  1150. graphics_t *graphics = thread_graphics;
  1151. return graphics->exports.device_sync_destroy(graphics->device, sync);
  1152. }
  1153. bool gs_sync_export_syncobj_timeline_point(gs_sync_t *sync, int syncobj_fd, uint64_t timeline_point)
  1154. {
  1155. graphics_t *graphics = thread_graphics;
  1156. return graphics->exports.device_sync_export_syncobj_timeline_point(graphics->device, sync, syncobj_fd,
  1157. timeline_point);
  1158. }
  1159. bool gs_sync_signal_syncobj_timeline_point(int syncobj_fd, uint64_t timeline_point)
  1160. {
  1161. graphics_t *graphics = thread_graphics;
  1162. return graphics->exports.device_sync_signal_syncobj_timeline_point(graphics->device, syncobj_fd,
  1163. timeline_point);
  1164. }
  1165. bool gs_sync_wait(gs_sync_t *sync)
  1166. {
  1167. graphics_t *graphics = thread_graphics;
  1168. return graphics->exports.device_sync_wait(graphics->device, sync);
  1169. }
  1170. #endif
gs_texture_t *gs_cubetexture_create(uint32_t size, enum gs_color_format color_format, uint32_t levels,
	const uint8_t **data, uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	bool pow2tex = is_pow2(size);
	bool uses_mipmaps = (flags & GS_BUILD_MIPMAPS || levels != 1);

	if (!gs_valid("gs_cubetexture_create"))
		return NULL;

	if (uses_mipmaps && !pow2tex) {
		blog(LOG_WARNING, "Cannot use mipmaps with a "
			"non-power-of-two texture. Disabling "
			"mipmaps for this texture.");
		uses_mipmaps = false;
		flags &= ~GS_BUILD_MIPMAPS;
		levels = 1;
	}

	if (uses_mipmaps && flags & GS_RENDER_TARGET) {
		blog(LOG_WARNING, "Cannot use mipmaps with render targets. "
			"Disabling mipmaps for this texture.");
		flags &= ~GS_BUILD_MIPMAPS;
		levels = 1;
		data = NULL;
	}

	return graphics->exports.device_cubetexture_create(graphics->device, size, color_format, levels, data, flags);
}

gs_texture_t *gs_voltexture_create(uint32_t width, uint32_t height, uint32_t depth, enum gs_color_format color_format,
	uint32_t levels, const uint8_t **data, uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_voltexture_create"))
		return NULL;
	return graphics->exports.device_voltexture_create(graphics->device, width, height, depth, color_format, levels,
		data, flags);
}

gs_zstencil_t *gs_zstencil_create(uint32_t width, uint32_t height, enum gs_zstencil_format format)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_zstencil_create"))
		return NULL;
	return graphics->exports.device_zstencil_create(graphics->device, width, height, format);
}

gs_stagesurf_t *gs_stagesurface_create(uint32_t width, uint32_t height, enum gs_color_format color_format)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stagesurface_create"))
		return NULL;
	return graphics->exports.device_stagesurface_create(graphics->device, width, height, color_format);
}

gs_samplerstate_t *gs_samplerstate_create(const struct gs_sampler_info *info)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_samplerstate_create", info))
		return NULL;
	return graphics->exports.device_samplerstate_create(graphics->device, info);
}

gs_shader_t *gs_vertexshader_create(const char *shader, const char *file, char **error_string)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_vertexshader_create", shader))
		return NULL;
	return graphics->exports.device_vertexshader_create(graphics->device, shader, file, error_string);
}

gs_shader_t *gs_pixelshader_create(const char *shader, const char *file, char **error_string)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_pixelshader_create", shader))
		return NULL;
	return graphics->exports.device_pixelshader_create(graphics->device, shader, file, error_string);
}

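/*
 * Vertex buffer creation: when GS_DUP_BUFFER is set, the incoming gs_vb_data
 * is deep-copied (points, normals, tangents, colors and the texture-vertex
 * arrays), so the caller keeps ownership of the original arrays.
 */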
gs_vertbuffer_t *gs_vertexbuffer_create(struct gs_vb_data *data, uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_vertexbuffer_create"))
		return NULL;

	if (data && data->num && (flags & GS_DUP_BUFFER) != 0) {
		struct gs_vb_data *new_data = gs_vbdata_create();
		new_data->num = data->num;

#define DUP_VAL(val) \
	do { \
		if (data->val) \
			new_data->val = bmemdup(data->val, sizeof(*data->val) * data->num); \
	} while (false)
		DUP_VAL(points);
		DUP_VAL(normals);
		DUP_VAL(tangents);
		DUP_VAL(colors);
#undef DUP_VAL

		if (data->tvarray && data->num_tex) {
			new_data->num_tex = data->num_tex;
			new_data->tvarray = bzalloc(sizeof(struct gs_tvertarray) * data->num_tex);

			for (size_t i = 0; i < data->num_tex; i++) {
				struct gs_tvertarray *tv = &data->tvarray[i];
				struct gs_tvertarray *new_tv = &new_data->tvarray[i];
				size_t size = tv->width * sizeof(float);

				new_tv->width = tv->width;
				new_tv->array = bmemdup(tv->array, size * data->num);
			}
		}

		data = new_data;
	}

	return graphics->exports.device_vertexbuffer_create(graphics->device, data, flags);
}

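/*
 * Index buffer creation: with GS_DUP_BUFFER the index array is duplicated
 * (2 bytes per index for GS_UNSIGNED_SHORT, otherwise 4).
 */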
gs_indexbuffer_t *gs_indexbuffer_create(enum gs_index_type type, void *indices, size_t num, uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_indexbuffer_create"))
		return NULL;

	if (indices && num && (flags & GS_DUP_BUFFER) != 0) {
		size_t size = type == GS_UNSIGNED_SHORT ? 2 : 4;
		indices = bmemdup(indices, size * num);
	}

	return graphics->exports.device_indexbuffer_create(graphics->device, type, indices, num, flags);
}

gs_timer_t *gs_timer_create(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_create"))
		return NULL;
	return graphics->exports.device_timer_create(graphics->device);
}

gs_timer_range_t *gs_timer_range_create(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_range_create"))
		return NULL;
	return graphics->exports.device_timer_range_create(graphics->device);
}

enum gs_texture_type gs_get_texture_type(const gs_texture_t *texture)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_get_texture_type", texture))
		return GS_TEXTURE_2D;
	return graphics->exports.device_get_texture_type(texture);
}

void gs_load_vertexbuffer(gs_vertbuffer_t *vertbuffer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_vertexbuffer"))
		return;
	graphics->exports.device_load_vertexbuffer(graphics->device, vertbuffer);
}

void gs_load_indexbuffer(gs_indexbuffer_t *indexbuffer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_indexbuffer"))
		return;
	graphics->exports.device_load_indexbuffer(graphics->device, indexbuffer);
}

void gs_load_texture(gs_texture_t *tex, int unit)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_texture"))
		return;
	graphics->exports.device_load_texture(graphics->device, tex, unit);
}

void gs_load_samplerstate(gs_samplerstate_t *samplerstate, int unit)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_samplerstate"))
		return;
	graphics->exports.device_load_samplerstate(graphics->device, samplerstate, unit);
}

void gs_load_vertexshader(gs_shader_t *vertshader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_vertexshader"))
		return;
	graphics->exports.device_load_vertexshader(graphics->device, vertshader);
}

void gs_load_pixelshader(gs_shader_t *pixelshader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_pixelshader"))
		return;
	graphics->exports.device_load_pixelshader(graphics->device, pixelshader);
}

void gs_load_default_samplerstate(bool b_3d, int unit)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_default_samplerstate"))
		return;
	graphics->exports.device_load_default_samplerstate(graphics->device, b_3d, unit);
}

gs_shader_t *gs_get_vertex_shader(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_vertex_shader"))
		return NULL;
	return graphics->exports.device_get_vertex_shader(graphics->device);
}

gs_shader_t *gs_get_pixel_shader(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_pixel_shader"))
		return NULL;
	return graphics->exports.device_get_pixel_shader(graphics->device);
}

enum gs_color_space gs_get_color_space(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_color_space"))
		return GS_CS_SRGB;
	return graphics->exports.device_get_color_space(graphics->device);
}

gs_texture_t *gs_get_render_target(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_render_target"))
		return NULL;
	return graphics->exports.device_get_render_target(graphics->device);
}

gs_zstencil_t *gs_get_zstencil_target(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_zstencil_target"))
		return NULL;
	return graphics->exports.device_get_zstencil_target(graphics->device);
}

void gs_set_render_target(gs_texture_t *tex, gs_zstencil_t *zstencil)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_render_target"))
		return;
	graphics->exports.device_set_render_target(graphics->device, tex, zstencil);
}

void gs_set_render_target_with_color_space(gs_texture_t *tex, gs_zstencil_t *zstencil, enum gs_color_space space)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_render_target_with_color_space"))
		return;
	graphics->exports.device_set_render_target_with_color_space(graphics->device, tex, zstencil, space);
}

void gs_set_cube_render_target(gs_texture_t *cubetex, int side, gs_zstencil_t *zstencil)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_cube_render_target"))
		return;
	graphics->exports.device_set_cube_render_target(graphics->device, cubetex, side, zstencil);
}

void gs_enable_framebuffer_srgb(bool enable)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_framebuffer_srgb"))
		return;
	graphics->exports.device_enable_framebuffer_srgb(graphics->device, enable);
}

bool gs_framebuffer_srgb_enabled(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_framebuffer_srgb_enabled"))
		return false;
	return graphics->exports.device_framebuffer_srgb_enabled(graphics->device);
}

bool gs_get_linear_srgb(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_linear_srgb"))
		return false;
	return graphics->linear_srgb;
}

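/* Sets the linear-sRGB hint and returns the previously set value. */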
bool gs_set_linear_srgb(bool linear_srgb)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_linear_srgb"))
		return false;

	const bool previous = graphics->linear_srgb;
	graphics->linear_srgb = linear_srgb;
	return previous;
}

void gs_copy_texture(gs_texture_t *dst, gs_texture_t *src)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_copy_texture", dst, src))
		return;
	graphics->exports.device_copy_texture(graphics->device, dst, src);
}

void gs_copy_texture_region(gs_texture_t *dst, uint32_t dst_x, uint32_t dst_y, gs_texture_t *src, uint32_t src_x,
	uint32_t src_y, uint32_t src_w, uint32_t src_h)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_copy_texture_region", dst))
		return;
	graphics->exports.device_copy_texture_region(graphics->device, dst, dst_x, dst_y, src, src_x, src_y, src_w,
		src_h);
}

void gs_stage_texture(gs_stagesurf_t *dst, gs_texture_t *src)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stage_texture"))
		return;
	graphics->exports.device_stage_texture(graphics->device, dst, src);
}

void gs_begin_frame(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_begin_frame"))
		return;
	graphics->exports.device_begin_frame(graphics->device);
}

void gs_begin_scene(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_begin_scene"))
		return;
	graphics->exports.device_begin_scene(graphics->device);
}

void gs_draw(enum gs_draw_mode draw_mode, uint32_t start_vert, uint32_t num_verts)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_draw"))
		return;
	graphics->exports.device_draw(graphics->device, draw_mode, start_vert, num_verts);
}

void gs_end_scene(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_end_scene"))
		return;
	graphics->exports.device_end_scene(graphics->device);
}

void gs_load_swapchain(gs_swapchain_t *swapchain)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_load_swapchain"))
		return;
	graphics->exports.device_load_swapchain(graphics->device, swapchain);
}

void gs_clear(uint32_t clear_flags, const struct vec4 *color, float depth, uint8_t stencil)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_clear"))
		return;
	graphics->exports.device_clear(graphics->device, clear_flags, color, depth, stencil);
}

bool gs_is_present_ready(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_is_present_ready"))
		return false;
	return graphics->exports.device_is_present_ready(graphics->device);
}

void gs_present(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_present"))
		return;
	graphics->exports.device_present(graphics->device);
}

void gs_flush(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_flush"))
		return;
	graphics->exports.device_flush(graphics->device);
}

void gs_set_cull_mode(enum gs_cull_mode mode)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_cull_mode"))
		return;
	graphics->exports.device_set_cull_mode(graphics->device, mode);
}

enum gs_cull_mode gs_get_cull_mode(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_get_cull_mode"))
		return GS_NEITHER;
	return graphics->exports.device_get_cull_mode(graphics->device);
}

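/*
 * Blend-state wrappers: in addition to forwarding the call to the device,
 * these mirror the requested state into graphics->cur_blend_state.
 */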
void gs_enable_blending(bool enable)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_blending"))
		return;

	graphics->cur_blend_state.enabled = enable;
	graphics->exports.device_enable_blending(graphics->device, enable);
}

void gs_enable_depth_test(bool enable)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_depth_test"))
		return;
	graphics->exports.device_enable_depth_test(graphics->device, enable);
}

void gs_enable_stencil_test(bool enable)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_stencil_test"))
		return;
	graphics->exports.device_enable_stencil_test(graphics->device, enable);
}

void gs_enable_stencil_write(bool enable)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_stencil_write"))
		return;
	graphics->exports.device_enable_stencil_write(graphics->device, enable);
}

void gs_enable_color(bool red, bool green, bool blue, bool alpha)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_enable_color"))
		return;
	graphics->exports.device_enable_color(graphics->device, red, green, blue, alpha);
}

void gs_blend_function(enum gs_blend_type src, enum gs_blend_type dest)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_blend_function"))
		return;

	graphics->cur_blend_state.src_c = src;
	graphics->cur_blend_state.dest_c = dest;
	graphics->cur_blend_state.src_a = src;
	graphics->cur_blend_state.dest_a = dest;
	graphics->exports.device_blend_function(graphics->device, src, dest);
}

void gs_blend_function_separate(enum gs_blend_type src_c, enum gs_blend_type dest_c, enum gs_blend_type src_a,
	enum gs_blend_type dest_a)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_blend_function_separate"))
		return;

	graphics->cur_blend_state.src_c = src_c;
	graphics->cur_blend_state.dest_c = dest_c;
	graphics->cur_blend_state.src_a = src_a;
	graphics->cur_blend_state.dest_a = dest_a;
	graphics->exports.device_blend_function_separate(graphics->device, src_c, dest_c, src_a, dest_a);
}

void gs_blend_op(enum gs_blend_op_type op)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_blend_op"))
		return;

	graphics->cur_blend_state.op = op;
	graphics->exports.device_blend_op(graphics->device, graphics->cur_blend_state.op);
}

void gs_depth_function(enum gs_depth_test test)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_depth_function"))
		return;
	graphics->exports.device_depth_function(graphics->device, test);
}

void gs_stencil_function(enum gs_stencil_side side, enum gs_depth_test test)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stencil_function"))
		return;
	graphics->exports.device_stencil_function(graphics->device, side, test);
}

void gs_stencil_op(enum gs_stencil_side side, enum gs_stencil_op_type fail, enum gs_stencil_op_type zfail,
	enum gs_stencil_op_type zpass)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stencil_op"))
		return;
	graphics->exports.device_stencil_op(graphics->device, side, fail, zfail, zpass);
}

void gs_set_viewport(int x, int y, int width, int height)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_viewport"))
		return;
	graphics->exports.device_set_viewport(graphics->device, x, y, width, height);
}

void gs_get_viewport(struct gs_rect *rect)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_get_viewport", rect))
		return;
	graphics->exports.device_get_viewport(graphics->device, rect);
}

void gs_set_scissor_rect(const struct gs_rect *rect)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_set_scissor_rect"))
		return;
	graphics->exports.device_set_scissor_rect(graphics->device, rect);
}

void gs_ortho(float left, float right, float top, float bottom, float znear, float zfar)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_ortho"))
		return;
	graphics->exports.device_ortho(graphics->device, left, right, top, bottom, znear, zfar);
}

void gs_frustum(float left, float right, float top, float bottom, float znear, float zfar)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_frustum"))
		return;
	graphics->exports.device_frustum(graphics->device, left, right, top, bottom, znear, zfar);
}

void gs_projection_push(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_projection_push"))
		return;
	graphics->exports.device_projection_push(graphics->device);
}

void gs_projection_pop(void)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_projection_pop"))
		return;
	graphics->exports.device_projection_pop(graphics->device);
}

void gs_swapchain_destroy(gs_swapchain_t *swapchain)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_swapchain_destroy"))
		return;
	if (!swapchain)
		return;
	graphics->exports.gs_swapchain_destroy(swapchain);
}

void gs_shader_destroy(gs_shader_t *shader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_shader_destroy"))
		return;
	if (!shader)
		return;
	graphics->exports.gs_shader_destroy(shader);
}

int gs_shader_get_num_params(const gs_shader_t *shader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_get_num_params", shader))
		return 0;
	return graphics->exports.gs_shader_get_num_params(shader);
}

gs_sparam_t *gs_shader_get_param_by_idx(gs_shader_t *shader, uint32_t param)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_get_param_by_idx", shader))
		return NULL;
	return graphics->exports.gs_shader_get_param_by_idx(shader, param);
}

gs_sparam_t *gs_shader_get_param_by_name(gs_shader_t *shader, const char *name)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_get_param_by_name", shader, name))
		return NULL;
	return graphics->exports.gs_shader_get_param_by_name(shader, name);
}

gs_sparam_t *gs_shader_get_viewproj_matrix(const gs_shader_t *shader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_get_viewproj_matrix", shader))
		return NULL;
	return graphics->exports.gs_shader_get_viewproj_matrix(shader);
}

gs_sparam_t *gs_shader_get_world_matrix(const gs_shader_t *shader)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_get_world_matrix", shader))
		return NULL;
	return graphics->exports.gs_shader_get_world_matrix(shader);
}

void gs_shader_get_param_info(const gs_sparam_t *param, struct gs_shader_param_info *info)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_get_param_info", param, info))
		return;
	graphics->exports.gs_shader_get_param_info(param, info);
}

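/* Shader parameter setters: validate the param pointer, then forward to the active backend. */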
void gs_shader_set_bool(gs_sparam_t *param, bool val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_bool", param))
		return;
	graphics->exports.gs_shader_set_bool(param, val);
}

void gs_shader_set_float(gs_sparam_t *param, float val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_float", param))
		return;
	graphics->exports.gs_shader_set_float(param, val);
}

void gs_shader_set_int(gs_sparam_t *param, int val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_int", param))
		return;
	graphics->exports.gs_shader_set_int(param, val);
}

void gs_shader_set_matrix3(gs_sparam_t *param, const struct matrix3 *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_matrix3", param, val))
		return;
	graphics->exports.gs_shader_set_matrix3(param, val);
}

void gs_shader_set_matrix4(gs_sparam_t *param, const struct matrix4 *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_matrix4", param, val))
		return;
	graphics->exports.gs_shader_set_matrix4(param, val);
}

void gs_shader_set_vec2(gs_sparam_t *param, const struct vec2 *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_vec2", param, val))
		return;
	graphics->exports.gs_shader_set_vec2(param, val);
}

void gs_shader_set_vec3(gs_sparam_t *param, const struct vec3 *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_vec3", param, val))
		return;
	graphics->exports.gs_shader_set_vec3(param, val);
}

void gs_shader_set_vec4(gs_sparam_t *param, const struct vec4 *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_vec4", param, val))
		return;
	graphics->exports.gs_shader_set_vec4(param, val);
}

void gs_shader_set_texture(gs_sparam_t *param, gs_texture_t *val)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_texture", param))
		return;
	graphics->exports.gs_shader_set_texture(param, val);
}

void gs_shader_set_val(gs_sparam_t *param, const void *val, size_t size)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p2("gs_shader_set_val", param, val))
		return;
	graphics->exports.gs_shader_set_val(param, val, size);
}

void gs_shader_set_default(gs_sparam_t *param)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_default", param))
		return;
	graphics->exports.gs_shader_set_default(param);
}

void gs_shader_set_next_sampler(gs_sparam_t *param, gs_samplerstate_t *sampler)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_shader_set_next_sampler", param))
		return;
	graphics->exports.gs_shader_set_next_sampler(param, sampler);
}

void gs_texture_destroy(gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_destroy"))
		return;
	if (!tex)
		return;
	graphics->exports.gs_texture_destroy(tex);
}

uint32_t gs_texture_get_width(const gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_get_width", tex))
		return 0;
	return graphics->exports.gs_texture_get_width(tex);
}

uint32_t gs_texture_get_height(const gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_get_height", tex))
		return 0;
	return graphics->exports.gs_texture_get_height(tex);
}

enum gs_color_format gs_texture_get_color_format(const gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_get_color_format", tex))
		return GS_UNKNOWN;
	return graphics->exports.gs_texture_get_color_format(tex);
}

bool gs_texture_map(gs_texture_t *tex, uint8_t **ptr, uint32_t *linesize)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p3("gs_texture_map", tex, ptr, linesize))
		return false;
	return graphics->exports.gs_texture_map(tex, ptr, linesize);
}

void gs_texture_unmap(gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_unmap", tex))
		return;
	graphics->exports.gs_texture_unmap(tex);
}

bool gs_texture_is_rect(const gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_is_rect", tex))
		return false;

	if (graphics->exports.gs_texture_is_rect)
		return graphics->exports.gs_texture_is_rect(tex);
	else
		return false;
}

void *gs_texture_get_obj(gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_get_obj", tex))
		return NULL;
	return graphics->exports.gs_texture_get_obj(tex);
}

void gs_cubetexture_destroy(gs_texture_t *cubetex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_cubetexture_destroy"))
		return;
	if (!cubetex)
		return;
	graphics->exports.gs_cubetexture_destroy(cubetex);
}

uint32_t gs_cubetexture_get_size(const gs_texture_t *cubetex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_cubetexture_get_size", cubetex))
		return 0;
	return graphics->exports.gs_cubetexture_get_size(cubetex);
}

enum gs_color_format gs_cubetexture_get_color_format(const gs_texture_t *cubetex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_cubetexture_get_color_format", cubetex))
		return GS_UNKNOWN;
	return graphics->exports.gs_cubetexture_get_color_format(cubetex);
}

void gs_voltexture_destroy(gs_texture_t *voltex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_voltexture_destroy"))
		return;
	if (!voltex)
		return;
	graphics->exports.gs_voltexture_destroy(voltex);
}

uint32_t gs_voltexture_get_width(const gs_texture_t *voltex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_voltexture_get_width", voltex))
		return 0;
	return graphics->exports.gs_voltexture_get_width(voltex);
}

uint32_t gs_voltexture_get_height(const gs_texture_t *voltex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_voltexture_get_height", voltex))
		return 0;
	return graphics->exports.gs_voltexture_get_height(voltex);
}

uint32_t gs_voltexture_get_depth(const gs_texture_t *voltex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_voltexture_get_depth", voltex))
		return 0;
	return graphics->exports.gs_voltexture_get_depth(voltex);
}

enum gs_color_format gs_voltexture_get_color_format(const gs_texture_t *voltex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_voltexture_get_color_format", voltex))
		return GS_UNKNOWN;
	return graphics->exports.gs_voltexture_get_color_format(voltex);
}

void gs_stagesurface_destroy(gs_stagesurf_t *stagesurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stagesurface_destroy"))
		return;
	if (!stagesurf)
		return;
	graphics->exports.gs_stagesurface_destroy(stagesurf);
}

uint32_t gs_stagesurface_get_width(const gs_stagesurf_t *stagesurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_stagesurface_get_width", stagesurf))
		return 0;
	return graphics->exports.gs_stagesurface_get_width(stagesurf);
}

uint32_t gs_stagesurface_get_height(const gs_stagesurf_t *stagesurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_stagesurface_get_height", stagesurf))
		return 0;
	return graphics->exports.gs_stagesurface_get_height(stagesurf);
}

enum gs_color_format gs_stagesurface_get_color_format(const gs_stagesurf_t *stagesurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_stagesurface_get_color_format", stagesurf))
		return GS_UNKNOWN;
	return graphics->exports.gs_stagesurface_get_color_format(stagesurf);
}

bool gs_stagesurface_map(gs_stagesurf_t *stagesurf, uint8_t **data, uint32_t *linesize)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p3("gs_stagesurface_map", stagesurf, data, linesize))
		return false;
	return graphics->exports.gs_stagesurface_map(stagesurf, data, linesize);
}

void gs_stagesurface_unmap(gs_stagesurf_t *stagesurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_stagesurface_unmap", stagesurf))
		return;
	graphics->exports.gs_stagesurface_unmap(stagesurf);
}

void gs_zstencil_destroy(gs_zstencil_t *zstencil)
{
	if (!gs_valid("gs_zstencil_destroy"))
		return;
	if (!zstencil)
		return;
	thread_graphics->exports.gs_zstencil_destroy(zstencil);
}

void gs_samplerstate_destroy(gs_samplerstate_t *samplerstate)
{
	if (!gs_valid("gs_samplerstate_destroy"))
		return;
	if (!samplerstate)
		return;
	thread_graphics->exports.gs_samplerstate_destroy(samplerstate);
}

void gs_vertexbuffer_destroy(gs_vertbuffer_t *vertbuffer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_vertexbuffer_destroy"))
		return;
	if (!vertbuffer)
		return;
	graphics->exports.gs_vertexbuffer_destroy(vertbuffer);
}

void gs_vertexbuffer_flush(gs_vertbuffer_t *vertbuffer)
{
	if (!gs_valid_p("gs_vertexbuffer_flush", vertbuffer))
		return;
	thread_graphics->exports.gs_vertexbuffer_flush(vertbuffer);
}

void gs_vertexbuffer_flush_direct(gs_vertbuffer_t *vertbuffer, const struct gs_vb_data *data)
{
	if (!gs_valid_p2("gs_vertexbuffer_flush_direct", vertbuffer, data))
		return;
	thread_graphics->exports.gs_vertexbuffer_flush_direct(vertbuffer, data);
}

struct gs_vb_data *gs_vertexbuffer_get_data(const gs_vertbuffer_t *vertbuffer)
{
	if (!gs_valid_p("gs_vertexbuffer_get_data", vertbuffer))
		return NULL;
	return thread_graphics->exports.gs_vertexbuffer_get_data(vertbuffer);
}

void gs_indexbuffer_destroy(gs_indexbuffer_t *indexbuffer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_indexbuffer_destroy"))
		return;
	if (!indexbuffer)
		return;
	graphics->exports.gs_indexbuffer_destroy(indexbuffer);
}

void gs_indexbuffer_flush(gs_indexbuffer_t *indexbuffer)
{
	if (!gs_valid_p("gs_indexbuffer_flush", indexbuffer))
		return;
	thread_graphics->exports.gs_indexbuffer_flush(indexbuffer);
}

void gs_indexbuffer_flush_direct(gs_indexbuffer_t *indexbuffer, const void *data)
{
	if (!gs_valid_p2("gs_indexbuffer_flush_direct", indexbuffer, data))
		return;
	thread_graphics->exports.gs_indexbuffer_flush_direct(indexbuffer, data);
}

void *gs_indexbuffer_get_data(const gs_indexbuffer_t *indexbuffer)
{
	if (!gs_valid_p("gs_indexbuffer_get_data", indexbuffer))
		return NULL;
	return thread_graphics->exports.gs_indexbuffer_get_data(indexbuffer);
}

size_t gs_indexbuffer_get_num_indices(const gs_indexbuffer_t *indexbuffer)
{
	if (!gs_valid_p("gs_indexbuffer_get_num_indices", indexbuffer))
		return 0;
	return thread_graphics->exports.gs_indexbuffer_get_num_indices(indexbuffer);
}

enum gs_index_type gs_indexbuffer_get_type(const gs_indexbuffer_t *indexbuffer)
{
	if (!gs_valid_p("gs_indexbuffer_get_type", indexbuffer))
		return (enum gs_index_type)0;
	return thread_graphics->exports.gs_indexbuffer_get_type(indexbuffer);
}

void gs_timer_destroy(gs_timer_t *timer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_destroy"))
		return;
	if (!timer)
		return;
	graphics->exports.gs_timer_destroy(timer);
}

void gs_timer_begin(gs_timer_t *timer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_begin"))
		return;
	if (!timer)
		return;
	graphics->exports.gs_timer_begin(timer);
}

void gs_timer_end(gs_timer_t *timer)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_end"))
		return;
	if (!timer)
		return;
	graphics->exports.gs_timer_end(timer);
}

bool gs_timer_get_data(gs_timer_t *timer, uint64_t *ticks)
{
	if (!gs_valid_p2("gs_timer_get_data", timer, ticks))
		return false;
	return thread_graphics->exports.gs_timer_get_data(timer, ticks);
}

void gs_timer_range_destroy(gs_timer_range_t *range)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_range_destroy"))
		return;
	if (!range)
		return;
	graphics->exports.gs_timer_range_destroy(range);
}

void gs_timer_range_begin(gs_timer_range_t *range)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_range_begin"))
		return;
	if (!range)
		return;
	graphics->exports.gs_timer_range_begin(range);
}

void gs_timer_range_end(gs_timer_range_t *range)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_timer_range_end"))
		return;
	if (!range)
		return;
	graphics->exports.gs_timer_range_end(range);
}

bool gs_timer_range_get_data(gs_timer_range_t *range, bool *disjoint, uint64_t *frequency)
{
	if (!gs_valid_p2("gs_timer_range_get_data", disjoint, frequency))
		return false;
	return thread_graphics->exports.gs_timer_range_get_data(range, disjoint, frequency);
}

bool gs_nv12_available(void)
{
	if (!gs_valid("gs_nv12_available"))
		return false;
	if (!thread_graphics->exports.device_nv12_available)
		return false;
	return thread_graphics->exports.device_nv12_available(thread_graphics->device);
}

bool gs_p010_available(void)
{
	if (!gs_valid("gs_p010_available"))
		return false;
	if (!thread_graphics->exports.device_p010_available)
		return false;
	return thread_graphics->exports.device_p010_available(thread_graphics->device);
}

bool gs_is_monitor_hdr(void *monitor)
{
	if (!gs_valid("gs_is_monitor_hdr"))
		return false;
	return thread_graphics->exports.device_is_monitor_hdr(thread_graphics->device, monitor);
}

void gs_debug_marker_begin(const float color[4], const char *markername)
{
	if (!gs_valid("gs_debug_marker_begin"))
		return;
	if (!markername)
		markername = "(null)";
	thread_graphics->exports.device_debug_marker_begin(thread_graphics->device, markername, color);
}

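/*
 * printf-style variant of gs_debug_marker_begin; the formatted marker name is
 * truncated to a 64-byte buffer.
 */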
void gs_debug_marker_begin_format(const float color[4], const char *format, ...)
{
	if (!gs_valid("gs_debug_marker_begin_format"))
		return;

	if (format) {
		char markername[64];
		va_list args;
		va_start(args, format);
		vsnprintf(markername, sizeof(markername), format, args);
		va_end(args);
		thread_graphics->exports.device_debug_marker_begin(thread_graphics->device, markername, color);
	} else {
		gs_debug_marker_begin(color, NULL);
	}
}

void gs_debug_marker_end(void)
{
	if (!gs_valid("gs_debug_marker_end"))
		return;
	thread_graphics->exports.device_debug_marker_end(thread_graphics->device);
}

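/*
 * NV12 creation: use the backend's native NV12 texture support when present;
 * otherwise fall back to a pair of textures (GS_R8 luma at full size and
 * GS_R8G8 chroma at half size).
 */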
bool gs_texture_create_nv12(gs_texture_t **tex_y, gs_texture_t **tex_uv, uint32_t width, uint32_t height,
	uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	bool success = false;

	if (!gs_valid("gs_texture_create_nv12"))
		return false;

	if ((width & 1) == 1 || (height & 1) == 1) {
		blog(LOG_ERROR, "NV12 textures must have dimensions "
			"divisible by 2.");
		return false;
	}

	if (graphics->exports.device_texture_create_nv12) {
		success = graphics->exports.device_texture_create_nv12(graphics->device, tex_y, tex_uv, width, height,
			flags);
		if (success)
			return true;
	}

	*tex_y = gs_texture_create(width, height, GS_R8, 1, NULL, flags);
	*tex_uv = gs_texture_create(width / 2, height / 2, GS_R8G8, 1, NULL, flags);

	if (!*tex_y || !*tex_uv) {
		if (*tex_y)
			gs_texture_destroy(*tex_y);
		if (*tex_uv)
			gs_texture_destroy(*tex_uv);
		*tex_y = NULL;
		*tex_uv = NULL;
		return false;
	}

	return true;
}

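/*
 * P010 creation: same approach as NV12, using GS_R16 for the luma plane and
 * GS_RG16 for the half-resolution chroma plane in the fallback path.
 */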
bool gs_texture_create_p010(gs_texture_t **tex_y, gs_texture_t **tex_uv, uint32_t width, uint32_t height,
	uint32_t flags)
{
	graphics_t *graphics = thread_graphics;
	bool success = false;

	if (!gs_valid("gs_texture_create_p010"))
		return false;

	if ((width & 1) == 1 || (height & 1) == 1) {
		blog(LOG_ERROR, "P010 textures must have dimensions "
			"divisible by 2.");
		return false;
	}

	if (graphics->exports.device_texture_create_p010) {
		success = graphics->exports.device_texture_create_p010(graphics->device, tex_y, tex_uv, width, height,
			flags);
		if (success)
			return true;
	}

	*tex_y = gs_texture_create(width, height, GS_R16, 1, NULL, flags);
	*tex_uv = gs_texture_create(width / 2, height / 2, GS_RG16, 1, NULL, flags);

	if (!*tex_y || !*tex_uv) {
		if (*tex_y)
			gs_texture_destroy(*tex_y);
		if (*tex_uv)
			gs_texture_destroy(*tex_uv);
		*tex_y = NULL;
		*tex_uv = NULL;
		return false;
	}

	return true;
}

uint32_t gs_get_adapter_count(void)
{
	if (!gs_valid("gs_get_adapter_count"))
		return 0;
	if (!thread_graphics->exports.gs_get_adapter_count)
		return 0;
	return thread_graphics->exports.gs_get_adapter_count();
}

#ifdef __APPLE__

/** Platform specific functions */
gs_texture_t *gs_texture_create_from_iosurface(void *iosurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_create_from_iosurface", iosurf))
		return NULL;
	if (!graphics->exports.device_texture_create_from_iosurface)
		return NULL;
	return graphics->exports.device_texture_create_from_iosurface(graphics->device, iosurf);
}

bool gs_texture_rebind_iosurface(gs_texture_t *texture, void *iosurf)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid_p("gs_texture_rebind_iosurface", texture))
		return false;
	if (!graphics->exports.gs_texture_rebind_iosurface)
		return false;
	return graphics->exports.gs_texture_rebind_iosurface(texture, iosurf);
}

bool gs_shared_texture_available(void)
{
	if (!gs_valid("gs_shared_texture_available"))
		return false;
	return thread_graphics->exports.device_shared_texture_available();
}

gs_texture_t *gs_texture_open_shared(uint32_t handle)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_open_shared"))
		return NULL;
	if (graphics->exports.device_texture_open_shared)
		return graphics->exports.device_texture_open_shared(graphics->device, handle);
	return NULL;
}

#elif _WIN32
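/* Windows-specific wrappers: GDI-compatible textures, shared texture handles,
 * texture acquire/release sync, output duplication, and device-loss callbacks. */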
bool gs_gdi_texture_available(void)
{
	if (!gs_valid("gs_gdi_texture_available"))
		return false;
	return thread_graphics->exports.device_gdi_texture_available();
}

bool gs_shared_texture_available(void)
{
	if (!gs_valid("gs_shared_texture_available"))
		return false;
	return thread_graphics->exports.device_shared_texture_available();
}

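/*
 * Output-duplication (monitor capture) wrappers. In the D3D11 backend these
 * are typically backed by DXGI Output Duplication, though that is a backend
 * detail not visible here.
 */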
bool gs_get_duplicator_monitor_info(int monitor_idx, struct gs_monitor_info *monitor_info)
{
	if (!gs_valid_p("gs_get_duplicator_monitor_info", monitor_info))
		return false;
	if (!thread_graphics->exports.device_get_duplicator_monitor_info)
		return false;
	return thread_graphics->exports.device_get_duplicator_monitor_info(thread_graphics->device, monitor_idx,
		monitor_info);
}

int gs_duplicator_get_monitor_index(void *monitor)
{
	if (!gs_valid("gs_duplicator_get_monitor_index"))
		return false;
	if (!thread_graphics->exports.device_duplicator_get_monitor_index)
		return false;
	return thread_graphics->exports.device_duplicator_get_monitor_index(thread_graphics->device, monitor);
}

gs_duplicator_t *gs_duplicator_create(int monitor_idx)
{
	if (!gs_valid("gs_duplicator_create"))
		return NULL;
	if (!thread_graphics->exports.device_duplicator_create)
		return NULL;
	return thread_graphics->exports.device_duplicator_create(thread_graphics->device, monitor_idx);
}

void gs_duplicator_destroy(gs_duplicator_t *duplicator)
{
	if (!gs_valid("gs_duplicator_destroy"))
		return;
	if (!duplicator)
		return;
	if (!thread_graphics->exports.gs_duplicator_destroy)
		return;
	thread_graphics->exports.gs_duplicator_destroy(duplicator);
}

bool gs_duplicator_update_frame(gs_duplicator_t *duplicator)
{
	if (!gs_valid_p("gs_duplicator_update_frame", duplicator))
		return false;
	if (!thread_graphics->exports.gs_duplicator_update_frame)
		return false;
	return thread_graphics->exports.gs_duplicator_update_frame(duplicator);
}

bool gs_can_adapter_fast_clear(void)
{
	if (!gs_valid("gs_can_adapter_fast_clear"))
		return false;
	if (!thread_graphics->exports.device_can_adapter_fast_clear)
		return false;
	return thread_graphics->exports.device_can_adapter_fast_clear(thread_graphics->device);
}

gs_texture_t *gs_duplicator_get_texture(gs_duplicator_t *duplicator)
{
	if (!gs_valid_p("gs_duplicator_get_texture", duplicator))
		return NULL;
	if (!thread_graphics->exports.gs_duplicator_get_texture)
		return NULL;
	return thread_graphics->exports.gs_duplicator_get_texture(duplicator);
}

enum gs_color_space gs_duplicator_get_color_space(gs_duplicator_t *duplicator)
{
	if (!gs_valid_p("gs_duplicator_get_color_space", duplicator))
		return GS_CS_SRGB;
	if (!thread_graphics->exports.gs_duplicator_get_color_space)
		return GS_CS_SRGB;
	return thread_graphics->exports.gs_duplicator_get_color_space(duplicator);
}

float gs_duplicator_get_sdr_white_level(gs_duplicator_t *duplicator)
{
	if (!gs_valid_p("gs_duplicator_get_sdr_white_level", duplicator))
		return 80.f;
	if (!thread_graphics->exports.gs_duplicator_get_sdr_white_level)
		return 80.f;
	return thread_graphics->exports.gs_duplicator_get_sdr_white_level(duplicator);
}

/** creates a Windows GDI-lockable texture */
gs_texture_t *gs_texture_create_gdi(uint32_t width, uint32_t height)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_create_gdi"))
		return NULL;
	if (graphics->exports.device_texture_create_gdi)
		return graphics->exports.device_texture_create_gdi(graphics->device, width, height);
	return NULL;
}

void *gs_texture_get_dc(gs_texture_t *gdi_tex)
{
	if (!gs_valid_p("gs_texture_get_dc", gdi_tex))
		return NULL;
	if (thread_graphics->exports.gs_texture_get_dc)
		return thread_graphics->exports.gs_texture_get_dc(gdi_tex);
	return NULL;
}

void gs_texture_release_dc(gs_texture_t *gdi_tex)
{
	if (!gs_valid_p("gs_texture_release_dc", gdi_tex))
		return;
	if (thread_graphics->exports.gs_texture_release_dc)
		thread_graphics->exports.gs_texture_release_dc(gdi_tex);
}

gs_texture_t *gs_texture_open_shared(uint32_t handle)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_open_shared"))
		return NULL;
	if (graphics->exports.device_texture_open_shared)
		return graphics->exports.device_texture_open_shared(graphics->device, handle);
	return NULL;
}

gs_texture_t *gs_texture_open_nt_shared(uint32_t handle)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_open_nt_shared"))
		return NULL;
	if (graphics->exports.device_texture_open_nt_shared)
		return graphics->exports.device_texture_open_nt_shared(graphics->device, handle);
	return NULL;
}

uint32_t gs_texture_get_shared_handle(gs_texture_t *tex)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_get_shared_handle"))
		return GS_INVALID_HANDLE;
	if (graphics->exports.device_texture_get_shared_handle)
		return graphics->exports.device_texture_get_shared_handle(tex);
	return GS_INVALID_HANDLE;
}

gs_texture_t *gs_texture_wrap_obj(void *obj)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_wrap_obj"))
		return NULL;
	if (graphics->exports.device_texture_wrap_obj)
		return graphics->exports.device_texture_wrap_obj(graphics->device, obj);
	return NULL;
}

int gs_texture_acquire_sync(gs_texture_t *tex, uint64_t key, uint32_t ms)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_acquire_sync"))
		return -1;
	if (graphics->exports.device_texture_acquire_sync)
		return graphics->exports.device_texture_acquire_sync(tex, key, ms);
	return -1;
}

int gs_texture_release_sync(gs_texture_t *tex, uint64_t key)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_texture_release_sync"))
		return -1;
	if (graphics->exports.device_texture_release_sync)
		return graphics->exports.device_texture_release_sync(tex, key);
	return -1;
}

gs_stagesurf_t *gs_stagesurface_create_nv12(uint32_t width, uint32_t height)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stagesurface_create_nv12"))
		return NULL;

	if ((width & 1) == 1 || (height & 1) == 1) {
		blog(LOG_ERROR, "NV12 textures must have dimensions "
			"divisible by 2.");
		return NULL;
	}

	if (graphics->exports.device_stagesurface_create_nv12)
		return graphics->exports.device_stagesurface_create_nv12(graphics->device, width, height);
	return NULL;
}

gs_stagesurf_t *gs_stagesurface_create_p010(uint32_t width, uint32_t height)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_stagesurface_create_p010"))
		return NULL;

	if ((width & 1) == 1 || (height & 1) == 1) {
		blog(LOG_ERROR, "P010 textures must have dimensions "
			"divisible by 2.");
		return NULL;
	}

	if (graphics->exports.device_stagesurface_create_p010)
		return graphics->exports.device_stagesurface_create_p010(graphics->device, width, height);
	return NULL;
}

void gs_register_loss_callbacks(const struct gs_device_loss *callbacks)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_register_loss_callbacks"))
		return;
	if (graphics->exports.device_register_loss_callbacks)
		graphics->exports.device_register_loss_callbacks(graphics->device, callbacks);
}

void gs_unregister_loss_callbacks(void *data)
{
	graphics_t *graphics = thread_graphics;
	if (!gs_valid("gs_unregister_loss_callbacks"))
		return;
	if (graphics->exports.device_unregister_loss_callbacks)
		graphics->exports.device_unregister_loss_callbacks(graphics->device, data);
}
#endif