fastcover.c

/*
 * Copyright (c) 2018-2020, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

/*-*************************************
*  Dependencies
***************************************/
#include <stdio.h>  /* fprintf */
#include <stdlib.h> /* malloc, free, qsort */
#include <string.h> /* memset */
#include <time.h>   /* clock */

#include "../common/mem.h" /* read */
#include "../common/pool.h"
#include "../common/threading.h"
#include "cover.h"
#include "../common/zstd_internal.h" /* includes zstd.h */
#ifndef ZDICT_STATIC_LINKING_ONLY
#define ZDICT_STATIC_LINKING_ONLY
#endif
#include "zdict.h"


/*-*************************************
*  Constants
***************************************/
#define FASTCOVER_MAX_SAMPLES_SIZE (sizeof(size_t) == 8 ? ((unsigned)-1) : ((unsigned)1 GB))
#define FASTCOVER_MAX_F 31
#define FASTCOVER_MAX_ACCEL 10
#define DEFAULT_SPLITPOINT 0.75
#define DEFAULT_F 20
#define DEFAULT_ACCEL 1


/*-*************************************
*  Console display
***************************************/
static int g_displayLevel = 2;
#define DISPLAY(...) \
  { \
    fprintf(stderr, __VA_ARGS__); \
    fflush(stderr); \
  }
#define LOCALDISPLAYLEVEL(displayLevel, l, ...) \
  if (displayLevel >= l) { \
    DISPLAY(__VA_ARGS__); \
  } /* 0 : no display;   1: errors;   2: default;  3: details;  4: debug */
#define DISPLAYLEVEL(l, ...) LOCALDISPLAYLEVEL(g_displayLevel, l, __VA_ARGS__)

#define LOCALDISPLAYUPDATE(displayLevel, l, ...) \
  if (displayLevel >= l) { \
    if ((clock() - g_time > refreshRate) || (displayLevel >= 4)) { \
      g_time = clock(); \
      DISPLAY(__VA_ARGS__); \
    } \
  }
#define DISPLAYUPDATE(l, ...) LOCALDISPLAYUPDATE(g_displayLevel, l, __VA_ARGS__)
static const clock_t refreshRate = CLOCKS_PER_SEC * 15 / 100;
static clock_t g_time = 0;


/*-*************************************
* Hash Functions
***************************************/
static const U64 prime6bytes = 227718039650203ULL;
static size_t ZSTD_hash6(U64 u, U32 h) { return (size_t)(((u << (64-48)) * prime6bytes) >> (64-h)) ; }
static size_t ZSTD_hash6Ptr(const void* p, U32 h) { return ZSTD_hash6(MEM_readLE64(p), h); }

static const U64 prime8bytes = 0xCF1BBCDCB7A56463ULL;
static size_t ZSTD_hash8(U64 u, U32 h) { return (size_t)(((u) * prime8bytes) >> (64-h)) ; }
static size_t ZSTD_hash8Ptr(const void* p, U32 h) { return ZSTD_hash8(MEM_readLE64(p), h); }


/**
 * Hash the d-byte value pointed to by p and mod 2^f
 */
static size_t FASTCOVER_hashPtrToIndex(const void* p, U32 h, unsigned d) {
  if (d == 6) {
    return ZSTD_hash6Ptr(p, h) & ((1 << h) - 1);
  }
  return ZSTD_hash8Ptr(p, h) & ((1 << h) - 1);
}
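
/* Illustrative note (added for exposition; not part of the original file): the
 * index returned above always lies in [0, 2^f), matching the frequency table
 * of (U64)1 << f entries allocated in FASTCOVER_ctx_init(). With the default
 * f = 20 that is 1,048,576 U32 counters (4 MB). Distinct dmers may collide on
 * the same slot; fastcover accepts that approximation in exchange for speed.
 * A minimal usage sketch, assuming `p` points at 8 or more readable bytes: */
#if 0
  {
    size_t const idx = FASTCOVER_hashPtrToIndex(p, 20 /* f */, 8 /* d */);
    /* idx < ((size_t)1 << 20), so it can index a table of 1 << 20 counters */
  }
#endif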


/*-*************************************
* Acceleration
***************************************/
typedef struct {
  unsigned finalize;  /* Percentage of training samples used for ZDICT_finalizeDictionary */
  unsigned skip;      /* Number of dmers skipped between each dmer counted in computeFrequency */
} FASTCOVER_accel_t;


static const FASTCOVER_accel_t FASTCOVER_defaultAccelParameters[FASTCOVER_MAX_ACCEL+1] = {
  { 100, 0 },   /* accel = 0, should not happen because accel = 0 defaults to accel = 1 */
  { 100, 0 },   /* accel = 1 */
  { 50, 1 },    /* accel = 2 */
  { 34, 2 },    /* accel = 3 */
  { 25, 3 },    /* accel = 4 */
  { 20, 4 },    /* accel = 5 */
  { 17, 5 },    /* accel = 6 */
  { 14, 6 },    /* accel = 7 */
  { 13, 7 },    /* accel = 8 */
  { 11, 8 },    /* accel = 9 */
  { 10, 9 },    /* accel = 10 */
};
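
/* Illustrative reading of the table above (added for exposition): at accel = 2,
 * skip = 1 makes FASTCOVER_computeFrequency() count every second dmer, and
 * finalize = 50 hands only half of the training samples to the finalize step;
 * larger accel values trade dictionary quality for faster training. */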


/*-*************************************
* Context
***************************************/
typedef struct {
  const BYTE *samples;
  size_t *offsets;
  const size_t *samplesSizes;
  size_t nbSamples;
  size_t nbTrainSamples;
  size_t nbTestSamples;
  size_t nbDmers;
  U32 *freqs;
  unsigned d;
  unsigned f;
  FASTCOVER_accel_t accelParams;
} FASTCOVER_ctx_t;


/*-*************************************
*  Helper functions
***************************************/
/**
 * Selects the best segment in an epoch.
 * Segments are scored according to the function:
 *
 * Let F(d) be the frequency of all dmers with hash value d.
 * Let S_i be the hash value of the dmer at position i of segment S which has length k.
 *
 * Score(S) = F(S_1) + F(S_2) + ... + F(S_{k-d+1})
 *
 * Once the dmer with hash value d is in the dictionary we set F(d) = 0.
 */
static COVER_segment_t FASTCOVER_selectSegment(const FASTCOVER_ctx_t *ctx,
                                               U32 *freqs, U32 begin, U32 end,
                                               ZDICT_cover_params_t parameters,
                                               U16* segmentFreqs) {
  /* Constants */
  const U32 k = parameters.k;
  const U32 d = parameters.d;
  const U32 f = ctx->f;
  const U32 dmersInK = k - d + 1;

  /* Try each segment (activeSegment) and save the best (bestSegment) */
  COVER_segment_t bestSegment = {0, 0, 0};
  COVER_segment_t activeSegment;

  /* Reset the activeDmers in the segment */
  /* The activeSegment starts at the beginning of the epoch. */
  activeSegment.begin = begin;
  activeSegment.end = begin;
  activeSegment.score = 0;

  /* Slide the activeSegment through the whole epoch.
   * Save the best segment in bestSegment.
   */
  while (activeSegment.end < end) {
    /* Get hash value of current dmer */
    const size_t idx = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.end, f, d);

    /* Add frequency of this index to score if this is the first occurrence of index in active segment */
    if (segmentFreqs[idx] == 0) {
      activeSegment.score += freqs[idx];
    }
    /* Increment end of segment and segmentFreqs */
    activeSegment.end += 1;
    segmentFreqs[idx] += 1;

    /* If the window is now too large, drop the first position */
    if (activeSegment.end - activeSegment.begin == dmersInK + 1) {
      /* Get hash value of the dmer to be eliminated from active segment */
      const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
      segmentFreqs[delIndex] -= 1;
      /* Subtract frequency of this index from score if this is the last occurrence of this index in active segment */
      if (segmentFreqs[delIndex] == 0) {
        activeSegment.score -= freqs[delIndex];
      }
      /* Increment start of segment */
      activeSegment.begin += 1;
    }

    /* If this segment is the best so far save it */
    if (activeSegment.score > bestSegment.score) {
      bestSegment = activeSegment;
    }
  }

  /* Zero out rest of segmentFreqs array */
  while (activeSegment.begin < end) {
    const size_t delIndex = FASTCOVER_hashPtrToIndex(ctx->samples + activeSegment.begin, f, d);
    segmentFreqs[delIndex] -= 1;
    activeSegment.begin += 1;
  }

  {
    /* Zero the frequency of hash value of each dmer covered by the chosen segment. */
    U32 pos;
    for (pos = bestSegment.begin; pos != bestSegment.end; ++pos) {
      const size_t i = FASTCOVER_hashPtrToIndex(ctx->samples + pos, f, d);
      freqs[i] = 0;
    }
  }

  return bestSegment;
}
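
/* Worked example of the scoring rule above (illustrative numbers, added for
 * exposition): with k = 4 and d = 2 a segment contains k - d + 1 = 3 dmers.
 * If their hash values are {5, 9, 5} and freqs[5] = 7, freqs[9] = 3, the
 * segment scores 7 + 3 = 10: the repeated hash value 5 is counted only once,
 * which is exactly what the segmentFreqs occupancy counters enforce. */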

static int FASTCOVER_checkParameters(ZDICT_cover_params_t parameters,
                                     size_t maxDictSize, unsigned f,
                                     unsigned accel) {
  /* k, d, and f are required parameters */
  if (parameters.d == 0 || parameters.k == 0) {
    return 0;
  }
  /* d has to be 6 or 8 */
  if (parameters.d != 6 && parameters.d != 8) {
    return 0;
  }
  /* k <= maxDictSize */
  if (parameters.k > maxDictSize) {
    return 0;
  }
  /* d <= k */
  if (parameters.d > parameters.k) {
    return 0;
  }
  /* 0 < f <= FASTCOVER_MAX_F */
  if (f > FASTCOVER_MAX_F || f == 0) {
    return 0;
  }
  /* 0 < splitPoint <= 1 */
  if (parameters.splitPoint <= 0 || parameters.splitPoint > 1) {
    return 0;
  }
  /* 0 < accel <= 10 */
  if (accel > 10 || accel == 0) {
    return 0;
  }
  return 1;
}


/**
 * Clean up a context initialized with `FASTCOVER_ctx_init()`.
 */
static void
FASTCOVER_ctx_destroy(FASTCOVER_ctx_t* ctx)
{
  if (!ctx) return;

  free(ctx->freqs);
  ctx->freqs = NULL;

  free(ctx->offsets);
  ctx->offsets = NULL;
}


/**
 * Calculate the frequency of the hash value of each dmer in ctx->samples
 */
static void
FASTCOVER_computeFrequency(U32* freqs, const FASTCOVER_ctx_t* ctx)
{
  const unsigned f = ctx->f;
  const unsigned d = ctx->d;
  const unsigned skip = ctx->accelParams.skip;
  const unsigned readLength = MAX(d, 8);
  size_t i;
  assert(ctx->nbTrainSamples >= 5);
  assert(ctx->nbTrainSamples <= ctx->nbSamples);
  for (i = 0; i < ctx->nbTrainSamples; i++) {
    size_t start = ctx->offsets[i];  /* start of current dmer */
    size_t const currSampleEnd = ctx->offsets[i+1];
    while (start + readLength <= currSampleEnd) {
      const size_t dmerIndex = FASTCOVER_hashPtrToIndex(ctx->samples + start, f, d);
      freqs[dmerIndex]++;
      start = start + skip + 1;
    }
  }
}
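
/* Note on the loop above (added for exposition): the bound uses
 * readLength = MAX(d, 8) rather than d because both ZSTD_hash6Ptr() and
 * ZSTD_hash8Ptr() load 8 bytes through MEM_readLE64(), even when d == 6, so
 * every load must stay at least 8 bytes inside the current sample. With
 * skip > 0 the loop advances by skip + 1 positions and counts only a subset
 * of the dmers, as selected by the acceleration table. */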

/**
 * Prepare a context for dictionary building.
 * The context is only dependent on the parameter `d` and can be used multiple
 * times.
 * Returns 0 on success, or an error code on failure.
 * The context must be destroyed with `FASTCOVER_ctx_destroy()`.
 */
static size_t
FASTCOVER_ctx_init(FASTCOVER_ctx_t* ctx,
                   const void* samplesBuffer,
                   const size_t* samplesSizes, unsigned nbSamples,
                   unsigned d, double splitPoint, unsigned f,
                   FASTCOVER_accel_t accelParams)
{
  const BYTE* const samples = (const BYTE*)samplesBuffer;
  const size_t totalSamplesSize = COVER_sum(samplesSizes, nbSamples);
  /* Split samples into testing and training sets */
  const unsigned nbTrainSamples = splitPoint < 1.0 ? (unsigned)((double)nbSamples * splitPoint) : nbSamples;
  const unsigned nbTestSamples = splitPoint < 1.0 ? nbSamples - nbTrainSamples : nbSamples;
  const size_t trainingSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes, nbTrainSamples) : totalSamplesSize;
  const size_t testSamplesSize = splitPoint < 1.0 ? COVER_sum(samplesSizes + nbTrainSamples, nbTestSamples) : totalSamplesSize;

  /* Checks */
  if (totalSamplesSize < MAX(d, sizeof(U64)) ||
      totalSamplesSize >= (size_t)FASTCOVER_MAX_SAMPLES_SIZE) {
    DISPLAYLEVEL(1, "Total samples size is too large (%u MB), maximum size is %u MB\n",
                 (unsigned)(totalSamplesSize >> 20), (FASTCOVER_MAX_SAMPLES_SIZE >> 20));
    return ERROR(srcSize_wrong);
  }

  /* Check if there are at least 5 training samples */
  if (nbTrainSamples < 5) {
    DISPLAYLEVEL(1, "Total number of training samples is %u and is invalid\n", nbTrainSamples);
    return ERROR(srcSize_wrong);
  }

  /* Check that there is at least one testing sample */
  if (nbTestSamples < 1) {
    DISPLAYLEVEL(1, "Total number of testing samples is %u and is invalid.\n", nbTestSamples);
    return ERROR(srcSize_wrong);
  }

  /* Zero the context */
  memset(ctx, 0, sizeof(*ctx));
  DISPLAYLEVEL(2, "Training on %u samples of total size %u\n", nbTrainSamples,
               (unsigned)trainingSamplesSize);
  DISPLAYLEVEL(2, "Testing on %u samples of total size %u\n", nbTestSamples,
               (unsigned)testSamplesSize);
  ctx->samples = samples;
  ctx->samplesSizes = samplesSizes;
  ctx->nbSamples = nbSamples;
  ctx->nbTrainSamples = nbTrainSamples;
  ctx->nbTestSamples = nbTestSamples;
  ctx->nbDmers = trainingSamplesSize - MAX(d, sizeof(U64)) + 1;
  ctx->d = d;
  ctx->f = f;
  ctx->accelParams = accelParams;

  /* The offsets of each file */
  ctx->offsets = (size_t*)calloc((nbSamples + 1), sizeof(size_t));
  if (ctx->offsets == NULL) {
    DISPLAYLEVEL(1, "Failed to allocate scratch buffers \n");
    FASTCOVER_ctx_destroy(ctx);
    return ERROR(memory_allocation);
  }

  /* Fill offsets from the samplesSizes */
  { U32 i;
    ctx->offsets[0] = 0;
    assert(nbSamples >= 5);
    for (i = 1; i <= nbSamples; ++i) {
      ctx->offsets[i] = ctx->offsets[i - 1] + samplesSizes[i - 1];
    }
  }

  /* Initialize frequency array of size 2^f */
  ctx->freqs = (U32*)calloc(((U64)1 << f), sizeof(U32));
  if (ctx->freqs == NULL) {
    DISPLAYLEVEL(1, "Failed to allocate frequency table \n");
    FASTCOVER_ctx_destroy(ctx);
    return ERROR(memory_allocation);
  }

  DISPLAYLEVEL(2, "Computing frequencies\n");
  FASTCOVER_computeFrequency(ctx->freqs, ctx);

  return 0;
}


/**
 * Given the prepared context, build the dictionary.
 */
static size_t
FASTCOVER_buildDictionary(const FASTCOVER_ctx_t* ctx,
                          U32* freqs,
                          void* dictBuffer, size_t dictBufferCapacity,
                          ZDICT_cover_params_t parameters,
                          U16* segmentFreqs)
{
  BYTE *const dict = (BYTE *)dictBuffer;
  size_t tail = dictBufferCapacity;
  /* Divide the data into epochs. We will select one segment from each epoch. */
  const COVER_epoch_info_t epochs = COVER_computeEpochs(
      (U32)dictBufferCapacity, (U32)ctx->nbDmers, parameters.k, 1);
  const size_t maxZeroScoreRun = 10;
  size_t zeroScoreRun = 0;
  size_t epoch;
  DISPLAYLEVEL(2, "Breaking content into %u epochs of size %u\n",
               (U32)epochs.num, (U32)epochs.size);
  /* Loop through the epochs until there are no more segments or the dictionary
   * is full.
   */
  for (epoch = 0; tail > 0; epoch = (epoch + 1) % epochs.num) {
    const U32 epochBegin = (U32)(epoch * epochs.size);
    const U32 epochEnd = epochBegin + epochs.size;
    size_t segmentSize;
    /* Select a segment */
    COVER_segment_t segment = FASTCOVER_selectSegment(
        ctx, freqs, epochBegin, epochEnd, parameters, segmentFreqs);

    /* If the segment covers no dmers, then we are out of content.
     * There may be new content in other epochs, so continue for some time.
     */
    if (segment.score == 0) {
      if (++zeroScoreRun >= maxZeroScoreRun) {
        break;
      }
      continue;
    }
    zeroScoreRun = 0;

    /* Trim the segment if necessary and if it is too small then we are done */
    segmentSize = MIN(segment.end - segment.begin + parameters.d - 1, tail);
    if (segmentSize < parameters.d) {
      break;
    }

    /* We fill the dictionary from the back to allow the best segments to be
     * referenced with the smallest offsets.
     */
    tail -= segmentSize;
    memcpy(dict + tail, ctx->samples + segment.begin, segmentSize);
    DISPLAYUPDATE(
        2, "\r%u%% ",
        (unsigned)(((dictBufferCapacity - tail) * 100) / dictBufferCapacity));
  }
  DISPLAYLEVEL(2, "\r%79s\r", "");
  return tail;
}
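
/* Shape of the loop above (added for exposition): COVER_computeEpochs() splits
 * the ctx->nbDmers dmer positions into epochs.num windows of epochs.size dmers,
 * and each iteration takes at most one k-byte segment from the current epoch,
 * copying it toward the tail of dictBuffer so the best-scoring content ends up
 * at the smallest offsets. The loop stops once the buffer is full, a trimmed
 * segment becomes smaller than d, or maxZeroScoreRun (10) consecutive
 * selections score zero. */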

/**
 * Parameters for FASTCOVER_tryParameters().
 */
typedef struct FASTCOVER_tryParameters_data_s {
  const FASTCOVER_ctx_t* ctx;
  COVER_best_t* best;
  size_t dictBufferCapacity;
  ZDICT_cover_params_t parameters;
} FASTCOVER_tryParameters_data_t;


/**
 * Tries a set of parameters and updates the COVER_best_t with the results.
 * This function is thread safe if zstd is compiled with multithreaded support.
 * It takes its parameters as an *OWNING* opaque pointer to support threading.
 */
static void FASTCOVER_tryParameters(void *opaque)
{
  /* Save parameters as local variables */
  FASTCOVER_tryParameters_data_t *const data = (FASTCOVER_tryParameters_data_t *)opaque;
  const FASTCOVER_ctx_t *const ctx = data->ctx;
  const ZDICT_cover_params_t parameters = data->parameters;
  size_t dictBufferCapacity = data->dictBufferCapacity;
  size_t totalCompressedSize = ERROR(GENERIC);
  /* Initialize array to keep track of frequency of dmer within activeSegment */
  U16* segmentFreqs = (U16 *)calloc(((U64)1 << ctx->f), sizeof(U16));
  /* Allocate space for hash table, dict, and freqs */
  BYTE *const dict = (BYTE * const)malloc(dictBufferCapacity);
  COVER_dictSelection_t selection = COVER_dictSelectionError(ERROR(GENERIC));
  U32 *freqs = (U32*) malloc(((U64)1 << ctx->f) * sizeof(U32));
  if (!segmentFreqs || !dict || !freqs) {
    DISPLAYLEVEL(1, "Failed to allocate buffers: out of memory\n");
    goto _cleanup;
  }
  /* Copy the frequencies because we need to modify them */
  memcpy(freqs, ctx->freqs, ((U64)1 << ctx->f) * sizeof(U32));
  /* Build the dictionary */
  { const size_t tail = FASTCOVER_buildDictionary(ctx, freqs, dict, dictBufferCapacity,
                                                  parameters, segmentFreqs);
    const unsigned nbFinalizeSamples = (unsigned)(ctx->nbTrainSamples * ctx->accelParams.finalize / 100);
    selection = COVER_selectDict(dict + tail, dictBufferCapacity - tail,
        ctx->samples, ctx->samplesSizes, nbFinalizeSamples, ctx->nbTrainSamples, ctx->nbSamples, parameters, ctx->offsets,
        totalCompressedSize);

    if (COVER_dictSelectionIsError(selection)) {
      DISPLAYLEVEL(1, "Failed to select dictionary\n");
      goto _cleanup;
    }
  }
_cleanup:
  free(dict);
  COVER_best_finish(data->best, parameters, selection);
  free(data);
  free(segmentFreqs);
  COVER_dictSelectionFree(selection);
  free(freqs);
}


static void
FASTCOVER_convertToCoverParams(ZDICT_fastCover_params_t fastCoverParams,
                               ZDICT_cover_params_t* coverParams)
{
  coverParams->k = fastCoverParams.k;
  coverParams->d = fastCoverParams.d;
  coverParams->steps = fastCoverParams.steps;
  coverParams->nbThreads = fastCoverParams.nbThreads;
  coverParams->splitPoint = fastCoverParams.splitPoint;
  coverParams->zParams = fastCoverParams.zParams;
  coverParams->shrinkDict = fastCoverParams.shrinkDict;
}


static void
FASTCOVER_convertToFastCoverParams(ZDICT_cover_params_t coverParams,
                                   ZDICT_fastCover_params_t* fastCoverParams,
                                   unsigned f, unsigned accel)
{
  fastCoverParams->k = coverParams.k;
  fastCoverParams->d = coverParams.d;
  fastCoverParams->steps = coverParams.steps;
  fastCoverParams->nbThreads = coverParams.nbThreads;
  fastCoverParams->splitPoint = coverParams.splitPoint;
  fastCoverParams->f = f;
  fastCoverParams->accel = accel;
  fastCoverParams->zParams = coverParams.zParams;
  fastCoverParams->shrinkDict = coverParams.shrinkDict;
}


ZDICTLIB_API size_t
ZDICT_trainFromBuffer_fastCover(void* dictBuffer, size_t dictBufferCapacity,
                                const void* samplesBuffer,
                                const size_t* samplesSizes, unsigned nbSamples,
                                ZDICT_fastCover_params_t parameters)
{
  BYTE* const dict = (BYTE*)dictBuffer;
  FASTCOVER_ctx_t ctx;
  ZDICT_cover_params_t coverParams;
  FASTCOVER_accel_t accelParams;
  /* Initialize global data */
  g_displayLevel = parameters.zParams.notificationLevel;
  /* Assign splitPoint and f if not provided */
  parameters.splitPoint = 1.0;
  parameters.f = parameters.f == 0 ? DEFAULT_F : parameters.f;
  parameters.accel = parameters.accel == 0 ? DEFAULT_ACCEL : parameters.accel;
  /* Convert to cover parameters */
  memset(&coverParams, 0 , sizeof(coverParams));
  FASTCOVER_convertToCoverParams(parameters, &coverParams);
  /* Checks */
  if (!FASTCOVER_checkParameters(coverParams, dictBufferCapacity, parameters.f,
                                 parameters.accel)) {
    DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
    return ERROR(parameter_outOfBound);
  }
  if (nbSamples == 0) {
    DISPLAYLEVEL(1, "FASTCOVER must have at least one input file\n");
    return ERROR(srcSize_wrong);
  }
  if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
    DISPLAYLEVEL(1, "dictBufferCapacity must be at least %u\n",
                 ZDICT_DICTSIZE_MIN);
    return ERROR(dstSize_tooSmall);
  }
  /* Assign corresponding FASTCOVER_accel_t to accelParams */
  accelParams = FASTCOVER_defaultAccelParameters[parameters.accel];
  /* Initialize context */
  {
    size_t const initVal = FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples,
                                              coverParams.d, parameters.splitPoint, parameters.f,
                                              accelParams);
    if (ZSTD_isError(initVal)) {
      DISPLAYLEVEL(1, "Failed to initialize context\n");
      return initVal;
    }
  }
  COVER_warnOnSmallCorpus(dictBufferCapacity, ctx.nbDmers, g_displayLevel);
  /* Build the dictionary */
  DISPLAYLEVEL(2, "Building dictionary\n");
  {
    /* Initialize array to keep track of frequency of dmer within activeSegment */
    U16* segmentFreqs = (U16 *)calloc(((U64)1 << parameters.f), sizeof(U16));
    const size_t tail = FASTCOVER_buildDictionary(&ctx, ctx.freqs, dictBuffer,
                                                  dictBufferCapacity, coverParams, segmentFreqs);
    const unsigned nbFinalizeSamples = (unsigned)(ctx.nbTrainSamples * ctx.accelParams.finalize / 100);
    const size_t dictionarySize = ZDICT_finalizeDictionary(
        dict, dictBufferCapacity, dict + tail, dictBufferCapacity - tail,
        samplesBuffer, samplesSizes, nbFinalizeSamples, coverParams.zParams);
    if (!ZSTD_isError(dictionarySize)) {
      DISPLAYLEVEL(2, "Constructed dictionary of size %u\n",
                   (unsigned)dictionarySize);
    }
    FASTCOVER_ctx_destroy(&ctx);
    free(segmentFreqs);
    return dictionarySize;
  }
}
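
/* Usage sketch for the entry point above (illustrative only; `dictBuffer`,
 * `dictBufferCapacity`, `samplesBuffer`, `samplesSizes` and `nbSamples` are
 * assumed to be provided by the caller, as for the other ZDICT_trainFromBuffer*
 * functions): */
#if 0
  {
    ZDICT_fastCover_params_t params;
    size_t dictSize;
    memset(&params, 0, sizeof(params));
    params.k = 200;     /* segment size */
    params.d = 8;       /* dmer size, must be 6 or 8 */
    params.f = 20;      /* log2 of the frequency table size */
    params.accel = 1;   /* 1 = slowest/best .. 10 = fastest */
    dictSize = ZDICT_trainFromBuffer_fastCover(dictBuffer, dictBufferCapacity,
                                               samplesBuffer, samplesSizes,
                                               nbSamples, params);
    if (ZDICT_isError(dictSize)) {
      /* handle the error, e.g. report ZDICT_getErrorName(dictSize) */
    }
  }
#endif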


ZDICTLIB_API size_t
ZDICT_optimizeTrainFromBuffer_fastCover(
                    void* dictBuffer, size_t dictBufferCapacity,
                    const void* samplesBuffer,
                    const size_t* samplesSizes, unsigned nbSamples,
                    ZDICT_fastCover_params_t* parameters)
{
  ZDICT_cover_params_t coverParams;
  FASTCOVER_accel_t accelParams;
  /* constants */
  const unsigned nbThreads = parameters->nbThreads;
  const double splitPoint =
      parameters->splitPoint <= 0.0 ? DEFAULT_SPLITPOINT : parameters->splitPoint;
  const unsigned kMinD = parameters->d == 0 ? 6 : parameters->d;
  const unsigned kMaxD = parameters->d == 0 ? 8 : parameters->d;
  const unsigned kMinK = parameters->k == 0 ? 50 : parameters->k;
  const unsigned kMaxK = parameters->k == 0 ? 2000 : parameters->k;
  const unsigned kSteps = parameters->steps == 0 ? 40 : parameters->steps;
  const unsigned kStepSize = MAX((kMaxK - kMinK) / kSteps, 1);
  const unsigned kIterations =
      (1 + (kMaxD - kMinD) / 2) * (1 + (kMaxK - kMinK) / kStepSize);
  const unsigned f = parameters->f == 0 ? DEFAULT_F : parameters->f;
  const unsigned accel = parameters->accel == 0 ? DEFAULT_ACCEL : parameters->accel;
  const unsigned shrinkDict = 0;
  /* Local variables */
  const int displayLevel = parameters->zParams.notificationLevel;
  unsigned iteration = 1;
  unsigned d;
  unsigned k;
  COVER_best_t best;
  POOL_ctx *pool = NULL;
  int warned = 0;
  /* Checks */
  if (splitPoint <= 0 || splitPoint > 1) {
    LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect splitPoint\n");
    return ERROR(parameter_outOfBound);
  }
  if (accel == 0 || accel > FASTCOVER_MAX_ACCEL) {
    LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect accel\n");
    return ERROR(parameter_outOfBound);
  }
  if (kMinK < kMaxD || kMaxK < kMinK) {
    LOCALDISPLAYLEVEL(displayLevel, 1, "Incorrect k\n");
    return ERROR(parameter_outOfBound);
  }
  if (nbSamples == 0) {
    LOCALDISPLAYLEVEL(displayLevel, 1, "FASTCOVER must have at least one input file\n");
    return ERROR(srcSize_wrong);
  }
  if (dictBufferCapacity < ZDICT_DICTSIZE_MIN) {
    LOCALDISPLAYLEVEL(displayLevel, 1, "dictBufferCapacity must be at least %u\n",
                      ZDICT_DICTSIZE_MIN);
    return ERROR(dstSize_tooSmall);
  }
  if (nbThreads > 1) {
    pool = POOL_create(nbThreads, 1);
    if (!pool) {
      return ERROR(memory_allocation);
    }
  }
  /* Initialization */
  COVER_best_init(&best);
  memset(&coverParams, 0 , sizeof(coverParams));
  FASTCOVER_convertToCoverParams(*parameters, &coverParams);
  accelParams = FASTCOVER_defaultAccelParameters[accel];
  /* Turn down global display level to clean up display at level 2 and below */
  g_displayLevel = displayLevel == 0 ? 0 : displayLevel - 1;
  /* Loop through d first because each new value needs a new context */
  LOCALDISPLAYLEVEL(displayLevel, 2, "Trying %u different sets of parameters\n",
                    kIterations);
  for (d = kMinD; d <= kMaxD; d += 2) {
    /* Initialize the context for this value of d */
    FASTCOVER_ctx_t ctx;
    LOCALDISPLAYLEVEL(displayLevel, 3, "d=%u\n", d);
    {
      size_t const initVal = FASTCOVER_ctx_init(&ctx, samplesBuffer, samplesSizes, nbSamples, d, splitPoint, f, accelParams);
      if (ZSTD_isError(initVal)) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to initialize context\n");
        COVER_best_destroy(&best);
        POOL_free(pool);
        return initVal;
      }
    }
    if (!warned) {
      COVER_warnOnSmallCorpus(dictBufferCapacity, ctx.nbDmers, displayLevel);
      warned = 1;
    }
    /* Loop through k reusing the same context */
    for (k = kMinK; k <= kMaxK; k += kStepSize) {
      /* Prepare the arguments */
      FASTCOVER_tryParameters_data_t *data = (FASTCOVER_tryParameters_data_t *)malloc(
          sizeof(FASTCOVER_tryParameters_data_t));
      LOCALDISPLAYLEVEL(displayLevel, 3, "k=%u\n", k);
      if (!data) {
        LOCALDISPLAYLEVEL(displayLevel, 1, "Failed to allocate parameters\n");
        COVER_best_destroy(&best);
        FASTCOVER_ctx_destroy(&ctx);
        POOL_free(pool);
        return ERROR(memory_allocation);
      }
      data->ctx = &ctx;
      data->best = &best;
      data->dictBufferCapacity = dictBufferCapacity;
      data->parameters = coverParams;
      data->parameters.k = k;
      data->parameters.d = d;
      data->parameters.splitPoint = splitPoint;
      data->parameters.steps = kSteps;
      data->parameters.shrinkDict = shrinkDict;
      data->parameters.zParams.notificationLevel = g_displayLevel;
      /* Check the parameters */
      if (!FASTCOVER_checkParameters(data->parameters, dictBufferCapacity,
                                     data->ctx->f, accel)) {
        DISPLAYLEVEL(1, "FASTCOVER parameters incorrect\n");
        free(data);
        continue;
      }
      /* Call the function and pass ownership of data to it */
      COVER_best_start(&best);
      if (pool) {
        POOL_add(pool, &FASTCOVER_tryParameters, data);
      } else {
        FASTCOVER_tryParameters(data);
      }
      /* Print status */
      LOCALDISPLAYUPDATE(displayLevel, 2, "\r%u%% ",
                         (unsigned)((iteration * 100) / kIterations));
      ++iteration;
    }
    COVER_best_wait(&best);
    FASTCOVER_ctx_destroy(&ctx);
  }
  LOCALDISPLAYLEVEL(displayLevel, 2, "\r%79s\r", "");
  /* Fill the output buffer and parameters with output of the best parameters */
  {
    const size_t dictSize = best.dictSize;
    if (ZSTD_isError(best.compressedSize)) {
      const size_t compressedSize = best.compressedSize;
      COVER_best_destroy(&best);
      POOL_free(pool);
      return compressedSize;
    }
    FASTCOVER_convertToFastCoverParams(best.parameters, parameters, f, accel);
    memcpy(dictBuffer, best.dict, dictSize);
    COVER_best_destroy(&best);
    POOL_free(pool);
    return dictSize;
  }
}
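
/* Usage sketch for the optimizing entry point above (illustrative only; the
 * same caller-provided buffers as in the previous sketch are assumed). Fields
 * left at 0 get defaults, k and d are searched, and the winning parameters are
 * written back into `params` on success: */
#if 0
  {
    ZDICT_fastCover_params_t params;
    size_t dictSize;
    memset(&params, 0, sizeof(params));
    params.nbThreads = 4;   /* parallel search when built with threading support */
    dictSize = ZDICT_optimizeTrainFromBuffer_fastCover(dictBuffer, dictBufferCapacity,
                                                       samplesBuffer, samplesSizes,
                                                       nbSamples, &params);
    if (!ZDICT_isError(dictSize)) {
      /* params.k, params.d, params.f and params.accel now hold the chosen values */
    }
  }
#endif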