y_functional_rewrite.inc

/*
Legal:
	Version: MPL 1.1
	
	The contents of this file are subject to the Mozilla Public License Version
	1.1 (the "License"); you may not use this file except in compliance with
	the License. You may obtain a copy of the License at
	http://www.mozilla.org/MPL/
	
	Software distributed under the License is distributed on an "AS IS" basis,
	WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
	for the specific language governing rights and limitations under the
	License.
	
	The Original Code is the YSI framework.
	
	The Initial Developer of the Original Code is Alex "Y_Less" Cole.
	Portions created by the Initial Developer are Copyright (C) 2011
	the Initial Developer. All Rights Reserved.
	
Contributors:
	Y_Less
	koolk
	JoeBullet/Google63
	g_aSlice/Slice
	Misiur
	samphunter
	tianmeta
	maddinat0r
	spacemud
	Crayder
	Dayvison
	Ahmad45123
	Zeex
	irinel1996
	Yiin-
	Chaprnks
	Konstantinos
	Masterchen09
	Southclaws
	PatchwerkQWER
	m0k1
	paulommu
	udan111
	
Thanks:
	JoeBullet/Google63 - Handy arbitrary ASM jump code using SCTRL.
	ZeeX - Very productive conversations.
	koolk - IsPlayerinAreaEx code.
	TheAlpha - Danish translation.
	breadfish - German translation.
	Fireburn - Dutch translation.
	yom - French translation.
	50p - Polish translation.
	Zamaroht - Spanish translation.
	Los - Portuguese translation.
	Dracoblue, sintax, mabako, Xtreme, other coders - Producing other modes for
		me to strive to better.
	Pixels^ - Running XScripters where the idea was born.
	Matite - Pestering me to release it and using it.
	
Very special thanks to:
	Thiadmer - PAWN, whose limits continue to amaze me!
	Kye/Kalcor - SA:MP.
	SA:MP Team past, present and future - SA:MP.
	
Optional plugins:
	Gamer_Z - GPS.
	Incognito - Streamer.
	Me - sscanf2, fixes2, Whirlpool.
*/
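
// This file rewrites the marker calls generated by lambda syntax (the `LAM@0`,
// `LAM@1`, `LAM@2`, and `LAM@5` stubs below) into plain jumps at `OnCodeInit`
// time.  See the worked example at the very bottom of this file for the
// overall shape of the transformation.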
static stock
	YSI_g_sFunctionalLastEnd = cellmin,
	YSI_g_sFunctionalExpectedDepth,
	YSI_g_sFunctionalStartPos,
	YSI_g_sFunctionalEndPos,
	YSI_g_sFunctionalAfterPos;
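
// Scratch state shared between the scanner callbacks below.  The position
// variables are reset before each secondary scan in `Functional_FoundCall`;
// `YSI_g_sFunctionalLastEnd` starts at `cellmin` so the first call address
// seen always compares greater than it.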

//static stock Functional_AddToStack()
//{
//	// Is this the first of a nested set of lambda calls?
//	// Get the desired stack size.  This is passed as the parameter count, not as a real parameter,
//	// to save instruction space.  So extract that.
//	new
//		size;
//	
//	#emit LOAD.S.pri 8
//	#emit STOR.S.pri size
//	
//	
//	// Now when we return, we want to remove a lot of stuff from the stack - all the extra data that
//	// we just backed up to the heap.  To do this, we increase the parameter count to match the size
//	// of the data to drop, then return.
//	#emit ZERO.pri
//	#emit STOR.S.pri 8
//	
//	#emit RETN
//	return 0;
//}
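
// (Retained for reference - the stack/heap juggling sketched above appears to
// be handled inline now by the `HEAP`/`MOVS`/`STACK` code written in
// `Functional_FoundCall` below.)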

stock LAM@1(idx = 0, pattern0 = 1, pattern1 = 1)
{
	#pragma unused idx, pattern0, pattern1
}
#define CALL@LAM@1 LAM@1(1)

stock LAM@2(par)
{
	#pragma unused par
}
#define CALL@LAM@2 LAM@2(1)

stock LAM@0(idx = 0)
{
	#pragma unused idx
	return 0;
}
#define CALL@LAM@0 LAM@0(1)

stock LAM@5(idx = 0, pattern0 = 1, pattern1 = 1, pattern2 = 1, pattern3 = 1)
{
	#pragma unused idx, pattern0, pattern1, pattern2, pattern3
	return 0;
}
#define CALL@LAM@5 LAM@5(1)
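
// The `LAM@N` functions above are pure markers - lambda macros compile to
// calls to them, and the scanners below find those calls, NOP them, and
// replace them with jumps, so the stub bodies should never actually run once
// rewriting is complete.  The `CALL@LAM@N` defines give the `&LAM@N`
// address-of operator used by the scanners a complete call expression to
// resolve.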

static stock Functional_FoundStart(const scanner[CodeScanner])
{
	if (YSI_g_sFunctionalStartPos != 0)
		return -1;
	P:4("Functional_FoundStart called");
	YSI_g_sFunctionalStartPos = CodeScanGetMatchAddressData(scanner);
	// NOP the call out.
	CodeScanNOPMatch(scanner);
	P:5("Functional_FoundStart done");
	if (YSI_g_sFunctionalExpectedDepth < 0)
	{
		YSI_g_sFunctionalExpectedDepth = CodeScanGetMatchStack(scanner);
	}
	return 0;
}

static stock Functional_FoundEnd(const scanner[CodeScanner])
{
	// This must always immediately follow the corresponding `LAM@1`, so if we haven't seen the
	// correct one yet, don't do anything.
	if (YSI_g_sFunctionalStartPos == 0)
		return -1;
	if (YSI_g_sFunctionalEndPos != 0)
		return -1;
	P:4("Functional_FoundEnd called");
	YSI_g_sFunctionalEndPos = CodeScanGetMatchAddressData(scanner);
	CodeScanNOPMatch(scanner);
	P:5("Functional_FoundEnd done");
	return 0;
}

static stock Functional_FoundAfter(const scanner[CodeScanner])
{
	if (YSI_g_sFunctionalAfterPos != 0)
		return -1;
	P:4("Functional_FoundAfter called");
	YSI_g_sFunctionalAfterPos = CodeScanGetMatchAddressData(scanner);
	CodeScanNOPMatch(scanner);
	P:5("Functional_FoundAfter done");
	// Immediately end the scanner.
	if (YSI_g_sFunctionalAfterPos > YSI_g_sFunctionalLastEnd)
		YSI_g_sFunctionalLastEnd = YSI_g_sFunctionalAfterPos;
	return cellmin;
}

static stock Functional_FoundCall1(const scanner[CodeScanner])
{
	// Propagate the return value - it tells the scanner whether to rescan.
	return Functional_FoundCall(scanner, CodeScanGetMatchHole(scanner, 0));
}

static stock Functional_FoundCall2(const scanner[CodeScanner])
{
	return Functional_FoundCall(scanner, 0);
}

static stock Functional_FoundCall(const scanner[CodeScanner], nestingLevel)
{
	P:4("Functional_FoundCall called: %d", nestingLevel);
	// Found the code scanner itself - ignore this one.
	if (nestingLevel > 0)
	{
		return -1;
	}
	new
		callPos = CodeScanGetMatchAddressData(scanner);
	CodeScanNOPMatch(scanner);
	if (callPos > YSI_g_sFunctionalLastEnd)
	{
		YSI_g_sFunctionalExpectedDepth = -1;
	}
	// Start a new scanner at the point this scanner ended.
	new
		second[CodeScanner];
	CodeScanClone(second, scanner);
	new lambdaStart0[CodeScanMatcher];
	CodeScanMatcherInit(lambdaStart0, &Functional_FoundStart);
	CodeScanMatcherPattern(lambdaStart0,
		OP(PUSH_C, 1)
		OP(PUSH_C, 1)
		OP(PUSH_C, nestingLevel)
		OP(PUSH_C, 12)
		OP(CALL, &LAM@1)
	);
	CodeScanAddMatcher(second, lambdaStart0);
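	// (The `PUSH_C 12` in the pattern above is the PAWN argument-size cell
	// pushed before every call - `LAM@1` takes three arguments, i.e. 12 bytes.
	// The `LAM@5` patterns below correspondingly use `PUSH_C 20` for five
	// arguments, and the single-argument `LAM@0`/`LAM@2` patterns use
	// `PUSH_C 4`.)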
	new lambdaStart1[CodeScanMatcher];
	CodeScanMatcherInit(lambdaStart1, &Functional_FoundStart);
	CodeScanMatcherPattern(lambdaStart1,
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, nestingLevel)
		OP(PUSH_PRI)
		OP(PUSH_C, 12)
		OP(CALL, &LAM@1)
	);
	CodeScanAddMatcher(second, lambdaStart1);
	// Needs to stay in scope.
	new lambdaStart2[CodeScanMatcher];
	if (nestingLevel == 0)
	{
		CodeScanMatcherInit(lambdaStart2, &Functional_FoundStart);
		CodeScanMatcherPattern(lambdaStart2,
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(ZERO_PRI)
			OP(PUSH_PRI)
			OP(PUSH_C, 12)
			OP(CALL, &LAM@1)
		);
		CodeScanAddMatcher(second, lambdaStart2);
	}
	new lambdaEnd[CodeScanMatcher];
	CodeScanMatcherInit(lambdaEnd, &Functional_FoundEnd);
	CodeScanMatcherPattern(lambdaEnd,
		OP(PUSH_PRI)
		OP(PUSH_C, 4)
		OP(CALL, &LAM@2)
	);
	CodeScanAddMatcher(second, lambdaEnd);
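	// (`LAM@2(result)` marks the point where the lambda's return value is
	// sitting in `pri`; the rewrite below replaces this call with
	// `STOR.pri I@`, so the value survives the jump back to the original
	// expression, which reloads it from `I@`.)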
	new lambdaAfter0[CodeScanMatcher];
	CodeScanMatcherInit(lambdaAfter0, &Functional_FoundAfter);
	CodeScanMatcherPattern(lambdaAfter0,
		OP(PUSH_C, 1)
		OP(PUSH_C, 1)
		OP(PUSH_C, 1)
		OP(PUSH_C, 1)
		OP(PUSH_C, nestingLevel)
		OP(PUSH_C, 20)
		OP(CALL, &LAM@5)
	);
	CodeScanAddMatcher(second, lambdaAfter0);
	new lambdaAfter1[CodeScanMatcher];
	CodeScanMatcherInit(lambdaAfter1, &Functional_FoundAfter);
	CodeScanMatcherPattern(lambdaAfter1,
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, 1)
		OP(PUSH_PRI)
		OP(CONST_PRI, nestingLevel)
		OP(PUSH_PRI)
		OP(PUSH_C, 20)
		OP(CALL, &LAM@5)
	);
	CodeScanAddMatcher(second, lambdaAfter1);
	new lambdaAfter2[CodeScanMatcher];
	if (nestingLevel == 0)
	{
		CodeScanMatcherInit(lambdaAfter2, &Functional_FoundAfter);
		CodeScanMatcherPattern(lambdaAfter2,
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(CONST_PRI, 1)
			OP(PUSH_PRI)
			OP(ZERO_PRI)
			OP(PUSH_PRI)
			OP(PUSH_C, 20)
			OP(CALL, &LAM@5)
		);
		CodeScanAddMatcher(second, lambdaAfter2);
	}
	YSI_g_sFunctionalStartPos = 0;
	YSI_g_sFunctionalEndPos = 0;
	YSI_g_sFunctionalAfterPos = 0;
	CodeScanRun(second);
	// We now have four bits of data - the call position (`callPos`), the start of the lambda code
	// (`YSI_g_sFunctionalStartPos`), the end of the lambda code (`YSI_g_sFunctionalEndPos`), and
	// the point just past it (`YSI_g_sFunctionalAfterPos`).  Turn `callPos` in to a jump to
	// `YSI_g_sFunctionalStartPos + 8`, `YSI_g_sFunctionalEndPos` in to a store of the result in
	// `I@`, `YSI_g_sFunctionalStartPos` in to a jump to `YSI_g_sFunctionalAfterPos`, and
	// `YSI_g_sFunctionalAfterPos` in to a jump back to `callPos + 8`.
	P:5("Functional_FoundCall: generating %d %d %d %d", callPos, YSI_g_sFunctionalStartPos, YSI_g_sFunctionalEndPos, YSI_g_sFunctionalAfterPos);
	new
		ctx[AsmContext],
		excess = CodeScanGetMatchStack(scanner) - YSI_g_sFunctionalExpectedDepth;
	AsmInitPtr(ctx, callPos, 16);
	// The relative jumps are all `+ 8` internally, since they add on the size of the `JUMP` OP.
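	// (So a jump written at address `a` with operand `t - a` actually lands at
	// `t + 8` - for example the jump below, written at `callPos`, lands at
	// `YSI_g_sFunctionalStartPos + 8`, just past the jump written at
	// `YSI_g_sFunctionalStartPos` itself.)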
	@emit JUMP.rel (YSI_g_sFunctionalStartPos - callPos)
	@emit LOAD.pri ref(I@)
	//@emit PUSH.C -YSI_g_sFunctionalExpectedDepth
	//@emit CALL 0
	AsmInitPtr(ctx, YSI_g_sFunctionalEndPos, 8);
	@emit STOR.pri ref(I@)
	AsmInitPtr(ctx, YSI_g_sFunctionalStartPos, 40);
	if (excess)
	{
		@emit JUMP.rel (YSI_g_sFunctionalAfterPos + 48 - YSI_g_sFunctionalStartPos)
		// Dump this bit of excess stack to the heap.
		@emit HEAP excess
		@emit LCTRL 4
		@emit MOVS excess
		@emit STACK excess
	}
	else
	{
		@emit JUMP.rel (YSI_g_sFunctionalAfterPos - YSI_g_sFunctionalStartPos)
	}
	//@emit PUSH.C YSI_g_sFunctionalExpectedDepth
	//@emit CALL 0
	AsmInitPtr(ctx, YSI_g_sFunctionalAfterPos, 56);
	if (excess)
	{
		// Restore the excess stack from the heap.
		@emit STACK -excess
		@emit STACK 0
		@emit LCTRL 2
		@emit ADD.C -excess
		@emit MOVS excess
		@emit SCTRL 2
		@emit JUMP.rel (callPos - YSI_g_sFunctionalAfterPos - 48)
	}
	else
	{
		@emit JUMP.rel (callPos - YSI_g_sFunctionalAfterPos)
	}
	// Code was written - rescan the whole function.
	return 1;
}

public OnCodeInit()
{
	P:2("Functional_OnCodeInit called");
	// Look for any calls to `LAM@0`, then find the next call to `LAM@1` with a matching parameter
	// number (the exact value is unimportant; however, for now we know that it is always `<= 0`, so
	// if the parameter is `> 0` ignore it - it's the code scanner itself).  Get rid of all the
	// calls and change them in to jumps to each other.
	new scanner[CodeScanner];
	CodeScanInit(scanner);
	new lambdaCall0[CodeScanMatcher];
	CodeScanMatcherInit(lambdaCall0, &Functional_FoundCall1);
	CodeScanMatcherPattern(lambdaCall0,
		OP(PUSH_C, ???)
		OP(PUSH_C, 4)
		OP(CALL, &LAM@0)
	);
	CodeScanAddMatcher(scanner, lambdaCall0);
	new lambdaCall1[CodeScanMatcher];
	CodeScanMatcherInit(lambdaCall1, &Functional_FoundCall1);
	CodeScanMatcherPattern(lambdaCall1,
		OP(CONST_PRI, ???)
		OP(PUSH_PRI)
		OP(PUSH_C, 4)
		OP(CALL, &LAM@0)
	);
	CodeScanAddMatcher(scanner, lambdaCall1);
	new lambdaCall2[CodeScanMatcher];
	CodeScanMatcherInit(lambdaCall2, &Functional_FoundCall2);
	CodeScanMatcherPattern(lambdaCall2,
		OP(ZERO_PRI)
		OP(PUSH_PRI)
		OP(PUSH_C, 4)
		OP(CALL, &LAM@0)
	);
	CodeScanAddMatcher(scanner, lambdaCall2);
	// Run fast, with an explicit search function.
	CodeScanRunFast(scanner, &LAM@0);
	#if defined Functional_OnCodeInit
		Functional_OnCodeInit();
	#endif
	return 1;
}
#undef OnCodeInit
#define OnCodeInit Functional_OnCodeInit
#if defined Functional_OnCodeInit
	forward Functional_OnCodeInit();
#endif
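
// Standard ALS-style hook chaining: any `OnCodeInit` defined after this point
// is renamed to `Functional_OnCodeInit` by the define above, and is called
// from the real `public OnCodeInit` once this file's rewriting is done.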

/*
	- The original code is:
	
		myVar = FoldR({ _0 + _1 }, arr, 0);
	
	- The compiled code generated is:
	
		myVar = @LAM0();
		{
			@LAM1();
			inline Func(_0, _1) @return _0 + _1;
			@LAM2(FoldR("Func", arr, 0));
		}
	
	- The rewritten code is:
	
			goto Func;
		:Write
			myVar = I@;
			goto Cont;
			{
		:Func
				inline Func(_0, _1) @return _0 + _1;
				I@ = FoldR("Func", arr, 0);
				goto Write;
			}
		:Cont
	
	This jumping about is done so that lambdas may be used in the middle of
	expressions.
*/
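
/*
	Purely illustrative (hypothetical user code - `arr` is a made-up name):
	because the call site is rewritten in place as jumps, a lambda can sit in
	the middle of a larger expression or argument list, e.g.:
	
		new arr[4] = { 1, 2, 3, 4 };
		printf("%d", FoldR({ _0 + _1 }, arr, 0));
	
	Anything the surrounding expression has already pushed on to the stack is
	moved to the heap around the lambda body (the `excess` handling in
	`Functional_FoundCall`), then restored before execution resumes at the
	original call site.
*/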