/* fastcover.c */

/*-*************************************
*  Dependencies
***************************************/
#include <stdio.h>  /* fprintf */
#include <stdlib.h> /* malloc, free, qsort */
#include <string.h> /* memset */
#include <time.h>   /* clock */

#include "mem.h" /* read */
#include "pool.h"
#include "threading.h"
#include "cover.h"
#include "zstd_internal.h" /* includes zstd.h */
#ifndef ZDICT_STATIC_LINKING_ONLY
#define ZDICT_STATIC_LINKING_ONLY
#endif
#include "zdict.h"


/*-*************************************
*  Constants
***************************************/
#define FASTCOVER_MAX_SAMPLES_SIZE (sizeof(size_t) == 8 ? ((unsigned)-1) : ((unsigned)1 GB))
#define FASTCOVER_MAX_F 31
#define FASTCOVER_MAX_ACCEL 10
#define DEFAULT_SPLITPOINT 0.75
#define DEFAULT_F 20
#define DEFAULT_ACCEL 1


/*-*************************************
*  Console display
***************************************/
static int g_displayLevel = 2;
#define DISPLAY(...)                                \
    {                                               \
        fprintf(stderr, __VA_ARGS__);               \
        fflush(stderr);                             \
    }
#define LOCALDISPLAYLEVEL(displayLevel, l, ...)     \
    if (displayLevel >= l) {                        \
        DISPLAY(__VA_ARGS__);                       \
    }   /* 0 : no display;  1: errors;  2: default;  3: details;  4: debug */
#define DISPLAYLEVEL(l, ...) LOCALDISPLAYLEVEL(g_displayLevel, l, __VA_ARGS__)

#define LOCALDISPLAYUPDATE(displayLevel, l, ...)                        \
    if (displayLevel >= l) {                                            \
        if ((clock() - g_time > refreshRate) || (displayLevel >= 4)) {  \
            g_time = clock();                                           \
            DISPLAY(__VA_ARGS__);                                       \
        }                                                               \
    }
#define DISPLAYUPDATE(l, ...) LOCALDISPLAYUPDATE(g_displayLevel, l, __VA_ARGS__)
static const clock_t refreshRate = CLOCKS_PER_SEC * 15 / 100;
static clock_t g_time = 0;


/*-*************************************
* Hash Functions
***************************************/
static const U64 prime6bytes = 227718039650203ULL;
static size_t ZSTD_hash6(U64 u, U32 h) { return (size_t)(((u << (64-48)) * prime6bytes) >> (64-h)); }
static size_t ZSTD_hash6Ptr(const void* p, U32 h) { return ZSTD_hash6(MEM_readLE64(p), h); }

static const U64 prime8bytes = 0xCF1BBCDCB7A56463ULL;
static size_t ZSTD_hash8(U64 u, U32 h) { return (size_t)(((u) * prime8bytes) >> (64-h)); }
static size_t ZSTD_hash8Ptr(const void* p, U32 h) { return ZSTD_hash8(MEM_readLE64(p), h); }

/**
 * Hashes the d-byte value pointed to by p and reduces the result modulo 2^f.
 */
static size_t FASTCOVER_hashPtrToIndex(const void* p, U32 h, unsigned d) {
    if (d == 6) {
        return ZSTD_hash6Ptr(p, h) & ((1 << h) - 1);
    }
    return ZSTD_hash8Ptr(p, h) & ((1 << h) - 1);
}
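
/*
 * Illustrative sketch (assumed values, not part of the library): the returned
 * index is always smaller than (1 << f), where f is the value passed as the
 * second argument, so it can index a frequency table of (1 << f) entries.
 * For example, with f == 20 and d == 8:
 *
 *     U32* freqs = (U32*)calloc((size_t)1 << 20, sizeof(U32));
 *     size_t const idx = FASTCOVER_hashPtrToIndex(ptr, 20, 8);
 *     freqs[idx] += 1;     (in bounds, since idx < (1 << 20))
 */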
/*-*************************************
* Acceleration
***************************************/
typedef struct {
    unsigned finalize;    /* Percentage of training samples used for ZDICT_finalizeDictionary */
    unsigned skip;        /* Number of dmers skipped between each dmer counted in computeFrequency */
} FASTCOVER_accel_t;


static const FASTCOVER_accel_t FASTCOVER_defaultAccelParameters[FASTCOVER_MAX_ACCEL+1] = {
    { 100, 0 },   /* accel = 0, should not happen because accel = 0 defaults to accel = 1 */
    { 100, 0 },   /* accel = 1 */
    {  50, 1 },   /* accel = 2 */
    {  34, 2 },   /* accel = 3 */
    {  25, 3 },   /* accel = 4 */
    {  20, 4 },   /* accel = 5 */
    {  17, 5 },   /* accel = 6 */
    {  14, 6 },   /* accel = 7 */
    {  13, 7 },   /* accel = 8 */
    {  11, 8 },   /* accel = 9 */
    {  10, 9 },   /* accel = 10 */
};


/*-*************************************
* Context
***************************************/
typedef struct {
    const BYTE *samples;
    size_t *offsets;
    const size_t *samplesSizes;
    size_t nbSamples;
    size_t nbTrainSamples;
    size_t nbTestSamples;
    size_t nbDmers;
    U32 *freqs;
    unsigned d;
    unsigned f;
    FASTCOVER_accel_t accelParams;
} FASTCOVER_ctx_t;


/*-*************************************
* Helper functions
***************************************/
/**
 * Selects the best segment in an epoch.
 * Segments are scored according to the function:
 *
 * Let F(d) be the frequency of all dmers with hash value d.
 * Let S_i be the hash value of the dmer at position i of segment S, which has length k.
 *
 *     Score(S) = F(S_1) + F(S_2) + ... + F(S_{k-d+1})
 *
 * Once the dmer with hash value d is in the dictionary we set F(d) = 0.
 */
static COVER_segment_t FASTCOVER_selectSegment(const FASTCOVER_ctx_t *ctx,
                                               U32 *freqs, U32 begin, U32 end,
                                               ZDICT_cover_params_t parameters,
                                               U16* segmentFreqs) {
    /* Constants */
    const U32 k = parameters.k;
    const U32 d = parameters.d;
    const U32 f = ctx->f;
    const U32 dmersInK = k - d + 1;

    /* Try each segment (activeSegment) and save the best (bestSegment) */
    COVER_segment_t bestSegment = {0, 0, 0};
    COVER_segment_t activeSegment;

    /* Reset the activeDmers in the segment */
    /* The activeSegment starts at the beginning of the epoch. */
    activeSegment.begin = begin;
    activeSegment.end = begin;
    activeSegment.score = 0;

    /* Slide the activeSegment through the whole epoch.
     * Save the best segment in bestSegment.
     */
    while (activeSegment.end < end) {
        /* Get hash value of current dmer */
        const size_t idx = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.end, f, d);

        /* Add the frequency of this index to the score if this is the first occurrence of the index in the active segment */
        if (segmentFreqs[idx] == 0) {
            activeSegment.score += freqs[idx];
        }
        /* Increment end of segment and segmentFreqs */
        activeSegment.end += 1;
        segmentFreqs[idx] += 1;
        /* If the window is now too large, drop the first position */
        if (activeSegment.end - activeSegment.begin == dmersInK + 1) {
            /* Get hash value of the dmer to be eliminated from the active segment */
            const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
            segmentFreqs[delIndex] -= 1;
            /* Subtract the frequency of this index from the score if this is the last occurrence of this index in the active segment */
            if (segmentFreqs[delIndex] == 0) {
                activeSegment.score -= freqs[delIndex];
            }
            /* Increment start of segment */
            activeSegment.begin += 1;
        }
        /* If this segment is the best so far, save it */
        if (activeSegment.score > bestSegment.score) {
            bestSegment = activeSegment;
        }
    }

    /* Zero out the rest of the segmentFreqs array */
    while (activeSegment.begin < end) {
        const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
        segmentFreqs[delIndex] -= 1;
        activeSegment.begin += 1;
    }

    {
        /* Zero the frequency of the hash value of each dmer covered by the chosen segment. */
        U32 pos;
        for (pos = bestSegment.begin; pos != bestSegment.end; ++pos) {
            const size_t i = FASTCOVER_hashPtrToIndex(ctx->samples + pos, f, d);
            freqs[i] = 0;
        }
    }

    return bestSegment;
}
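
/*
 * Worked example (illustrative numbers, not from the library): with k = 16 and
 * d = 8, a segment spans dmersInK = 16 - 8 + 1 = 9 dmer positions. If two of
 * those positions hash to the same index, segmentFreqs records the duplicate,
 * so that index's frequency is added to Score(S) only once, and it is only
 * subtracted again when the last occurrence of the index slides out of the
 * window.
 */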
static int FASTCOVER_checkParameters(ZDICT_cover_params_t parameters,
                                     size_t maxDictSize, unsigned f,
                                     unsigned accel) {
    /* k, d, and f are required parameters */
    if (parameters.d == 0 || parameters.k == 0) {
        return 0;
    }
    /* d has to be 6 or 8 */
    if (parameters.d != 6 && parameters.d != 8) {
        return 0;
    }
    /* k <= maxDictSize */
    if (parameters.k > maxDictSize) {
        return 0;
    }
    /* d <= k */
    if (parameters.d > parameters.k) {
        return 0;
    }
    /* 0 < f <= FASTCOVER_MAX_F */
    if (f > FASTCOVER_MAX_F || f == 0) {
        return 0;
    }
    /* 0 < splitPoint <= 1 */
    if (parameters.splitPoint <= 0 || parameters.splitPoint > 1) {
        return 0;
    }
    /* 0 < accel <= 10 */
    if (accel > 10 || accel == 0) {
        return 0;
    }
    return 1;
}


/**
 * Clean up a context initialized with `FASTCOVER_ctx_init()`.
 */
static void
FASTCOVER_ctx_destroy(FASTCOVER_ctx_t* ctx)
{
    if (!ctx) return;

    free(ctx->freqs);
    ctx->freqs = NULL;

    free(ctx->offsets);
    ctx->offsets = NULL;
}


/**
 * Calculate the frequency of the hash value of each dmer in ctx->samples.
 */
static void
FASTCOVER_computeFrequency(U32* freqs, const FASTCOVER_ctx_t* ctx)
{
    const unsigned f = ctx->f;
    const unsigned d = ctx->d;
    const unsigned skip = ctx->accelParams.skip;
    const unsigned readLength = MAX(d, 8);
    size_t i;
    assert(ctx->nbTrainSamples >= 5);
    assert(ctx->nbTrainSamples <= ctx->nbSamples);
    for (i = 0; i < ctx->nbTrainSamples; i++) {
        size_t start = ctx->offsets[i];  /* start of current dmer */
        size_t const currSampleEnd = ctx->offsets[i+1];
        while (start + readLength <= currSampleEnd) {
            const size_t dmerIndex = FASTCOVER_hashPtrToIndex(ctx->samples + start, f, d);
            freqs[dmerIndex]++;
            start = start + skip + 1;
        }
    }
}
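
/*
 * Illustrative note (values taken from FASTCOVER_defaultAccelParameters above):
 * with accel = 2 the skip field is 1, so the loop above advances start by
 * skip + 1 = 2 bytes per iteration and counts roughly every other dmer,
 * halving the work of this frequency pass compared to accel = 1.
 */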
/**
 * Prepare a context for dictionary building.
 * The context is only dependent on the parameter `d` and can be used multiple
 * times.
 * Returns 1 on success or zero on error.
 * The context must be destroyed with `FASTCOVER_ctx_destroy()`.
 */
static int
FASTCOVER_ctx_init(FASTCOVER_ctx_t* ctx,
                   const void* samplesBuffer,
                   const size_t* samplesSizes, unsigned nbSamples,
                   unsigned d, double splitPoint, unsigned f,
                   FASTCOVER_accel_t accelParams)
{
    const BYTE* const samples = (const BYTE*)samplesBuffer;
    const size_t totalSamplesSize = COVER_sum(samplesSizes, nbSamples);
    /* Split samples into testing and training sets */
    const unsigned nbTrainSamples = splitPoint < 1.0 ? (unsigned)((double)nbSamples * splitPoint) : nbSamples;
    const unsigned nbTestSamples = splitPoint < 1.0 ? nbSamples - nbTrainSamples : nbSamples;
    const size_t trainingSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes, nbTrainSamples) : totalSamplesSize;
    const size_t testSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes + nbTrainSamples, nbTestSamples) : totalSamplesSize;

    /* Checks */
    if (totalSamplesSize < MAX(d, sizeof(U64)) ||
        totalSamplesSize >= (size_t)FASTCOVER_MAX_SAMPLES_SIZE) {
        DISPLAYLEVEL(1, "Total samples size is too large (%u MB), maximum size is %u MB\n",
                     (unsigned)(totalSamplesSize >> 20), (FASTCOVER_MAX_SAMPLES_SIZE >> 20));
        return 0;
    }

    /* Check if there are at least 5 training samples */
    if (nbTrainSamples < 5) {
        DISPLAYLEVEL(1, "Total number of training samples is %u and is invalid\n", nbTrainSamples);
        return 0;
    }

    /* Check if there's at least one testing sample */
    if (nbTestSamples < 1) {
        DISPLAYLEVEL(1, "Total number of testing samples is %u and is invalid.\n", nbTestSamples);
        return 0;
    }

    /* Zero the context */
    memset(ctx, 0, sizeof(*ctx));
    DISPLAYLEVEL(2, "Training on %u samples of total size %u\n", nbTrainSamples,
                 (unsigned)trainingSamplesSize);
    DISPLAYLEVEL(2, "Testing on %u samples of total size %u\n", nbTestSamples,
                 (unsigned)testSamplesSize);

    ctx->samples = samples;
    ctx->samplesSizes = samplesSizes;
    ctx->nbSamples = nbSamples;
    ctx->nbTrainSamples = nbTrainSamples;
    ctx->nbTestSamples = nbTestSamples;
    ctx->nbDmers = trainingSamplesSize - MAX(d, sizeof(U64)) + 1;
    ctx->d = d;
    ctx->f = f;
    ctx->accelParams = accelParams;

    /* The offsets of each file */
    ctx->offsets = (size_t*)calloc((nbSamples + 1), sizeof(size_t));
    if (ctx->offsets == NULL) {
        DISPLAYLEVEL(1, "Failed to allocate scratch buffers\n");
        FASTCOVER_ctx_destroy(ctx);
        return 0;
    }

    /* Fill offsets from the samplesSizes */
    {   U32 i;
        ctx->offsets[0] = 0;
        assert(nbSamples >= 5);
        for (i = 1; i <= nbSamples; ++i) {
            ctx->offsets[i] = ctx->offsets[i - 1] + samplesSizes[i - 1];
        }
    }

    /* Initialize frequency array of size 2^f */
    ctx->freqs = (U32*)calloc(((U64)1 << f), sizeof(U32));
    if (ctx->freqs == NULL) {
        DISPLAYLEVEL(1, "Failed to allocate frequency table\n");
        FASTCOVER_ctx_destroy(ctx);
        return 0;
    }

    DISPLAYLEVEL(2, "Computing frequencies\n");
    FASTCOVER_computeFrequency(ctx->freqs, ctx);

    return 1;
}


/**
 * Given the prepared context, build the dictionary.
 */
static size_t
FASTCOVER_buildDictionary(const FASTCOVER_ctx_t* ctx,
                          U32* freqs,
                          void* dictBuffer, size_t dictBufferCapacity,
                          ZDICT_cover_params_t parameters,
                          U16* segmentFreqs)
{
    BYTE *const dict = (BYTE *)dictBuffer;
    size_t tail = dictBufferCapacity;
    /* Divide the data up into epochs of equal size.
     * We will select at least one segment from each epoch.
     */
    const unsigned epochs = MAX(1, (U32)(dictBufferCapacity / parameters.k));
    const unsigned epochSize = (U32)(ctx->nbDmers / epochs);
    size_t epoch;
    DISPLAYLEVEL(2, "Breaking content into %u epochs of size %u\n",
                 epochs, epochSize);
    /* Loop through the epochs until there are no more segments or the dictionary
     * is full.
     */
    for (epoch = 0; tail > 0; epoch = (epoch + 1) % epochs) {
        const U32 epochBegin = (U32)(epoch * epochSize);
        const U32 epochEnd = epochBegin + epochSize;
        size_t segmentSize;
        /* Select a segment */
        COVER_segment_t segment = FASTCOVER_selectSegment(
            ctx, freqs, epochBegin, epochEnd, parameters, segmentFreqs);

        /* If the segment covers no dmers, then we are out of content */
        if (segment.score == 0) {
            break;
        }

        /* Trim the segment if necessary; if it is too small then we are done */
        segmentSize = MIN(segment.end - segment.begin + parameters.d - 1, tail);
        if (segmentSize < parameters.d) {
            break;
        }

        /* We fill the dictionary from the back to allow the best segments to be
         * referenced with the smallest offsets.
         */
        tail -= segmentSize;
        memcpy(dict + tail, ctx->samples + segment.begin, segmentSize);
        DISPLAYUPDATE(
            2, "\r%u%% ",
            (unsigned)(((dictBufferCapacity - tail) * 100) / dictBufferCapacity));
    }
    DISPLAYLEVEL(2, "\r%79s\r", "");
    return tail;
}
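
/*
 * Illustrative sketch (assumed numbers, not from the library): with
 * dictBufferCapacity = 112640 and parameters.k = 1024 the loop above uses
 * epochs = 112640 / 1024 = 110, and with ctx->nbDmers = 1100000 each epoch
 * covers epochSize = 1100000 / 110 = 10000 dmer positions; one best-scoring
 * segment is taken from each epoch in round-robin order until the dictionary
 * buffer is full.
 */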
/**
 * Parameters for FASTCOVER_tryParameters().
 */
typedef struct FASTCOVER_tryParameters_data_s {
    const FASTCOVER_ctx_t* ctx;
    COVER_best_t* best;
    size_t dictBufferCapacity;
    ZDICT_cover_params_t parameters;
} FASTCOVER_tryParameters_data_t;


/**
 * Tries a set of parameters and updates the COVER_best_t with the results.
 * This function is thread safe if zstd is compiled with multithreaded support.
 * It takes its parameters as an *OWNING* opaque pointer to support threading.
 */
static void FASTCOVER_tryParameters(void *opaque)
{
    /* Save parameters as local variables */
    FASTCOVER_tryParameters_data_t *const data = (FASTCOVER_tryParameters_data_t *)opaque;
    const FASTCOVER_ctx_t *const ctx = data->ctx;
    const ZDICT_cover_params_t parameters = data->parameters;
    size_t dictBufferCapacity = data->dictBufferCapacity;
    size_t totalCompressedSize = ERROR(GENERIC);
    /* Initialize array to keep track of the frequency of each dmer within the activeSegment */
    U16* segmentFreqs = (U16 *)calloc(((U64)1 << ctx->f), sizeof(U16));
    /* Allocate space for hash table, dict, and freqs */
    BYTE *const dict = (BYTE * const)malloc(dictBufferCapacity);
    U32 *freqs = (U32*) malloc(((U64)1 << ctx->f) * sizeof(U32));
    if (!segmentFreqs || !dict || !freqs) {
        DISPLAYLEVEL(1, "Failed to allocate buffers: out of memory\n");
        goto _cleanup;
    }
    /* Copy the frequencies because we need to modify them */
    memcpy(freqs, ctx->freqs, ((U64)1 << ctx->f) * sizeof(U32));
    /* Build the dictionary */
    {   const size_t tail = FASTCOVER_buildDictionary(ctx, freqs, dict, dictBufferCapacity,
                                                      parameters, segmentFreqs);
        const unsigned nbFinalizeSamples = (unsigned)(ctx->nbTrainSamples * ctx->accelParams.finalize / 100);
        dictBufferCapacity = ZDICT_finalizeDictionary(
            dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
            ctx->samples, ctx->samplesSizes, nbFinalizeSamples, parameters.zParams);
        if (ZDICT_isError(dictBufferCapacity)) {
            DISPLAYLEVEL(1, "Failed to finalize dictionary\n");
            goto _cleanup;
        }
    }
    /* Check total compressed size */
    totalCompressedSize = COVER_checkTotalCompressedSize(parameters, ctx->samplesSizes,
                                                         ctx->samples, ctx->offsets,
                                                         ctx->nbTrainSamples, ctx->nbSamples,
                                                         dict, dictBufferCapacity);
_cleanup:
    COVER_best_finish(data->best, totalCompressedSize, parameters, dict,
                      dictBufferCapacity);
    free(data);
    free(segmentFreqs);
    free(dict);
    free(freqs);
}


static void
FASTCOVER_convertToCoverParams(ZDICT_fastCover_params_t fastCoverParams,
                               ZDICT_cover_params_t* coverParams)
{
    coverParams->k = fastCoverParams.k;
    coverParams->d = fastCoverParams.d;
    coverParams->steps = fastCoverParams.steps;
    coverParams->nbThreads = fastCoverParams.nbThreads;
    coverParams->splitPoint = fastCoverParams.splitPoint;
    coverParams->zParams = fastCoverParams.zParams;
}


static void
FASTCOVER_convertToFastCoverParams(ZDICT_cover_params_t coverParams,
                                   ZDICT_fastCover_params_t* fastCoverParams,
                                   unsigned f, unsigned accel)
{
    fastCoverParams->k = coverParams.k;
    fastCoverParams->d = coverParams.d;
    fastCoverParams->steps = coverParams.steps;
    fastCoverParams->nbThreads = coverParams.nbThreads;
    fastCoverParams->splitPoint = coverParams.splitPoint;
    fastCoverParams->f = f;
    fastCoverParams->accel = accel;
    fastCoverParams->zParams = coverParams.zParams;
}


ZDICTLIB_API size_t
ZDICT_trainFromBuffer_fastCover(void* dictBuffer, size_t dictBufferCapacity,
                                const void* samplesBuffer,
                                const size_t* samplesSizes, unsigned nbSamples,
                                ZDICT_fastCover_params_t parameters)
{
    BYTE* const dict = (BYTE*)dictBuffer;
    FASTCOVER_ctx_t ctx;
    ZDICT_cover_params_t coverParams;
    FASTCOVER_accel_t accelParams;
    /* Initialize global data */
    g_displayLevel = parameters.zParams.notificationLevel;
    /* splitPoint is fixed at 1.0 for this entry point; assign f and accel if not provided */
    parameters.splitPoint = 1.0;
    parameters.f = parameters.f == 0 ? DEFAULT_F : parameters.f;
    parameters.accel = parameters.accel == 0 ? DEFAULT_ACCEL : parameters.accel;
    /* Convert to cover parameters */
    memset(&coverParams, 0, sizeof(coverParams));
    FASTCOVER_convertToCoverParams(parameters, &coverParams);
    /* Checks */
    if (!FASTCOVER_checkParameters(coverParams, dictBufferCapacity, parameters.f,
                                   parameters.accel)) {
        DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
        return ERROR(GENERIC);
    }
    if (nbSamples == 0) {
        DISPLAYLEVEL(1, "FASTCOVER must have at least one input file\n");
        return ERROR(GENERIC);
    }
    if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
        DISPLAYLEVEL(1, "dictBufferCapacity must be at least %u\n",
                     ZDICT_DICTSIZE_MIN);
        return ERROR(dstSize_tooSmall);
    }
    /* Assign corresponding FASTCOVER_accel_t to accelParams */
    accelParams = FASTCOVER_defaultAccelParameters[parameters.accel];
    /* Initialize context */
    if (!FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples,
                            coverParams.d, parameters.splitPoint, parameters.f,
                            accelParams)) {
        DISPLAYLEVEL(1, "Failed to initialize context\n");
        return ERROR(GENERIC);
    }
    /* Build the dictionary */
    DISPLAYLEVEL(2, "Building dictionary\n");
    {
        /* Initialize array to keep track of the frequency of each dmer within the activeSegment */
        U16* segmentFreqs = (U16 *)calloc(((U64)1 << parameters.f), sizeof(U16));
        const size_t tail = FASTCOVER_buildDictionary(&ctx, ctx.freqs, dictBuffer,
                                                      dictBufferCapacity, coverParams, segmentFreqs);
        const unsigned nbFinalizeSamples = (unsigned)(ctx.nbTrainSamples * ctx.accelParams.finalize / 100);
        const size_t dictionarySize = ZDICT_finalizeDictionary(
            dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
            samplesBuffer, samplesSizes, nbFinalizeSamples, coverParams.zParams);
        if (!ZSTD_isError(dictionarySize)) {
            DISPLAYLEVEL(2, "Constructed dictionary of size %u\n",
                         (unsigned)dictionarySize);
        }
        FASTCOVER_ctx_destroy(&ctx);
        free(segmentFreqs);
        return dictionarySize;
    }
}
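
/*
 * Usage sketch for ZDICT_trainFromBuffer_fastCover(), kept out of the build with
 * `#if 0`. The helper name and the parameter values below are illustrative
 * assumptions, not requirements; the entry point itself is declared in the
 * static-linking-only section of zdict.h, which this file enables above.
 */
#if 0
static size_t example_train_fastCover(void* dictBuffer, size_t dictBufferCapacity,
                                      const void* samplesBuffer,
                                      const size_t* samplesSizes, unsigned nbSamples)
{
    ZDICT_fastCover_params_t params;
    memset(&params, 0, sizeof(params));   /* unset fields keep their defaults */
    params.d = 8;                         /* dmer size: must be 6 or 8 */
    params.k = 200;                       /* segment size: d <= k <= dictBufferCapacity */
    params.f = DEFAULT_F;                 /* log2 of the frequency-table size */
    params.accel = DEFAULT_ACCEL;         /* 1 (precise) .. FASTCOVER_MAX_ACCEL (fast) */
    params.zParams.compressionLevel = 3;

    /* Needs at least 5 samples and dictBufferCapacity >= ZDICT_DICTSIZE_MIN. */
    {   const size_t dictSize = ZDICT_trainFromBuffer_fastCover(
                dictBuffer, dictBufferCapacity,
                samplesBuffer, samplesSizes, nbSamples, params);
        if (ZDICT_isError(dictSize)) return 0;   /* parameter or size check failed */
        return dictSize;                         /* bytes written into dictBuffer */
    }
}
#endif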
ZDICTLIB_API size_t
ZDICT_optimizeTrainFromBuffer_fastCover(
                    void* dictBuffer, size_t dictBufferCapacity,
                    const void* samplesBuffer,
                    const size_t* samplesSizes, unsigned nbSamples,
                    ZDICT_fastCover_params_t* parameters)
{
    ZDICT_cover_params_t coverParams;
    FASTCOVER_accel_t accelParams;
    /* constants */
    const unsigned nbThreads = parameters->nbThreads;
    const double splitPoint =
        parameters->splitPoint <= 0.0 ? DEFAULT_SPLITPOINT : parameters->splitPoint;
    const unsigned kMinD = parameters->d == 0 ? 6 : parameters->d;
    const unsigned kMaxD = parameters->d == 0 ? 8 : parameters->d;
    const unsigned kMinK = parameters->k == 0 ? 50 : parameters->k;
    const unsigned kMaxK = parameters->k == 0 ? 2000 : parameters->k;
    const unsigned kSteps = parameters->steps == 0 ? 40 : parameters->steps;
    const unsigned kStepSize = MAX((kMaxK - kMinK) / kSteps, 1);
    const unsigned kIterations =
        (1 + (kMaxD - kMinD) / 2) * (1 + (kMaxK - kMinK) / kStepSize);
    const unsigned f = parameters->f == 0 ? DEFAULT_F : parameters->f;
    const unsigned accel = parameters->accel == 0 ? DEFAULT_ACCEL : parameters->accel;
    /* Local variables */
    const int displayLevel = parameters->zParams.notificationLevel;
    unsigned iteration = 1;
    unsigned d;
    unsigned k;
    COVER_best_t best;
    POOL_ctx *pool = NULL;
    /* Checks */
    if (splitPoint <= 0 || splitPoint > 1) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect splitPoint\n");
        return ERROR(GENERIC);
    }
    if (accel == 0 || accel > FASTCOVER_MAX_ACCEL) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect accel\n");
        return ERROR(GENERIC);
    }
    if (kMinK < kMaxD || kMaxK < kMinK) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect k\n");
        return ERROR(GENERIC);
    }
    if (nbSamples == 0) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "FASTCOVER must have at least one input file\n");
        return ERROR(GENERIC);
    }
    if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "dictBufferCapacity must be at least %u\n",
                          ZDICT_DICTSIZE_MIN);
        return ERROR(dstSize_tooSmall);
    }
    if (nbThreads > 1) {
        pool = POOL_create(nbThreads, 1);
        if (!pool) {
            return ERROR(memory_allocation);
        }
    }
    /* Initialization */
    COVER_best_init(&best);
    memset(&coverParams, 0, sizeof(coverParams));
    FASTCOVER_convertToCoverParams(*parameters, &coverParams);
    accelParams = FASTCOVER_defaultAccelParameters[accel];
    /* Turn down global display level to clean up display at level 2 and below */
    g_displayLevel = displayLevel == 0 ? 0 : displayLevel - 1;
    /* Loop through d first because each new value needs a new context */
    LOCALDISPLAYLEVEL(displayLevel, 2, "Trying %u different sets of parameters\n",
                      kIterations);
    for (d = kMinD; d <= kMaxD; d += 2) {
        /* Initialize the context for this value of d */
        FASTCOVER_ctx_t ctx;
        LOCALDISPLAYLEVEL(displayLevel, 3, "d=%u\n", d);
        if (!FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples, d, splitPoint, f, accelParams)) {
            LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to initialize context\n");
            COVER_best_destroy(&best);
            POOL_free(pool);
            return ERROR(GENERIC);
        }
        /* Loop through k reusing the same context */
        for (k = kMinK; k <= kMaxK; k += kStepSize) {
            /* Prepare the arguments */
            FASTCOVER_tryParameters_data_t *data = (FASTCOVER_tryParameters_data_t *)malloc(
                sizeof(FASTCOVER_tryParameters_data_t));
            LOCALDISPLAYLEVEL(displayLevel, 3, "k=%u\n", k);
            if (!data) {
                LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to allocate parameters\n");
                COVER_best_destroy(&best);
                FASTCOVER_ctx_destroy(&ctx);
                POOL_free(pool);
                return ERROR(GENERIC);
            }
            data->ctx = &ctx;
            data->best = &best;
            data->dictBufferCapacity = dictBufferCapacity;
            data->parameters = coverParams;
            data->parameters.k = k;
            data->parameters.d = d;
            data->parameters.splitPoint = splitPoint;
            data->parameters.steps = kSteps;
            data->parameters.zParams.notificationLevel = g_displayLevel;
            /* Check the parameters */
            if (!FASTCOVER_checkParameters(data->parameters, dictBufferCapacity,
                                           data->ctx->f, accel)) {
                DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
                free(data);
                continue;
            }
            /* Call the function and pass ownership of data to it */
            COVER_best_start(&best);
            if (pool) {
                POOL_add(pool, &FASTCOVER_tryParameters, data);
            } else {
                FASTCOVER_tryParameters(data);
            }
            /* Print status */
            LOCALDISPLAYUPDATE(displayLevel, 2, "\r%u%% ",
                               (unsigned)((iteration * 100) / kIterations));
            ++iteration;
        }
        COVER_best_wait(&best);
        FASTCOVER_ctx_destroy(&ctx);
    }
    LOCALDISPLAYLEVEL(displayLevel, 2, "\r%79s\r", "");
    /* Fill the output buffer and parameters with output of the best parameters */
    {   const size_t dictSize = best.dictSize;
        if (ZSTD_isError(best.compressedSize)) {
            const size_t compressedSize = best.compressedSize;
            COVER_best_destroy(&best);
            POOL_free(pool);
            return compressedSize;
        }
        FASTCOVER_convertToFastCoverParams(best.parameters, parameters, f, accel);
        memcpy(dictBuffer, best.dict, dictSize);
        COVER_best_destroy(&best);
        POOL_free(pool);
        return dictSize;
    }
}
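
/*
 * Usage sketch for ZDICT_optimizeTrainFromBuffer_fastCover(), also kept out of
 * the build with `#if 0` and using an assumed helper name. Leaving k, d, f,
 * steps and accel at 0 asks the function to search its default grid; the
 * winning values are written back into `params` on success.
 */
#if 0
static size_t example_optimize_fastCover(void* dictBuffer, size_t dictBufferCapacity,
                                         const void* samplesBuffer,
                                         const size_t* samplesSizes, unsigned nbSamples)
{
    ZDICT_fastCover_params_t params;
    memset(&params, 0, sizeof(params));   /* 0 => defaults: d in {6,8}, k in [50,2000], 40 steps */
    params.nbThreads = 2;                 /* optional: try parameter sets in a POOL of workers */
    params.zParams.compressionLevel = 3;

    {   const size_t dictSize = ZDICT_optimizeTrainFromBuffer_fastCover(
                dictBuffer, dictBufferCapacity,
                samplesBuffer, samplesSizes, nbSamples, &params);
        if (ZDICT_isError(dictSize)) return 0;
        /* params.k, params.d, params.f and params.accel now hold the selected values. */
        return dictSize;
    }
}
#endif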