/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#if defined (__cplusplus)
extern "C" {
#endif

#ifndef ZSTD_ZDICT_H
#define ZSTD_ZDICT_H

/*======  Dependencies  ======*/
#include <stddef.h>  /* size_t */


/* =====   ZDICTLIB_API : control library symbols visibility   ===== */
#ifndef ZDICTLIB_VISIBLE
   /* Backwards compatibility with old macro name */
#  ifdef ZDICTLIB_VISIBILITY
#    define ZDICTLIB_VISIBLE ZDICTLIB_VISIBILITY
#  elif defined(__GNUC__) && (__GNUC__ >= 4) && !defined(__MINGW32__)
#    define ZDICTLIB_VISIBLE __attribute__ ((visibility ("default")))
#  else
#    define ZDICTLIB_VISIBLE
#  endif
#endif

#ifndef ZDICTLIB_HIDDEN
#  if defined(__GNUC__) && (__GNUC__ >= 4) && !defined(__MINGW32__)
#    define ZDICTLIB_HIDDEN __attribute__ ((visibility ("hidden")))
#  else
#    define ZDICTLIB_HIDDEN
#  endif
#endif

#if defined(ZSTD_DLL_EXPORT) && (ZSTD_DLL_EXPORT==1)
#  define ZDICTLIB_API __declspec(dllexport) ZDICTLIB_VISIBLE
#elif defined(ZSTD_DLL_IMPORT) && (ZSTD_DLL_IMPORT==1)
#  define ZDICTLIB_API __declspec(dllimport) ZDICTLIB_VISIBLE /* It isn't required but allows generating better code, saving a function pointer load from the IAT and an indirect jump. */
#else
#  define ZDICTLIB_API ZDICTLIB_VISIBLE
#endif

/*******************************************************************************
 * Zstd dictionary builder
 *
 * FAQ
 * ===
 * Why should I use a dictionary?
 * ------------------------------
 *
 * Zstd can use dictionaries to improve compression ratio of small data.
 * Traditionally small files don't compress well because there is very little
 * repetition in a single sample, since it is small. But, if you are compressing
 * many similar files, like a bunch of JSON records that share the same
 * structure, you can train a dictionary ahead of time on some samples of
 * these files. Then, zstd can use the dictionary to find repetitions that are
 * present across samples. This can vastly improve compression ratio.
 *
 * When is a dictionary useful?
 * ----------------------------
 *
 * Dictionaries are useful when compressing many small files that are similar.
 * The larger a file is, the less benefit a dictionary will have. Generally,
 * we don't expect dictionary compression to be effective past 100KB. And the
 * smaller a file is, the more we would expect the dictionary to help.
 *
 * How do I use a dictionary?
 * --------------------------
 *
 * Simply pass the dictionary to the zstd compressor with
 * `ZSTD_CCtx_loadDictionary()`. The same dictionary must then be passed to
 * the decompressor, using `ZSTD_DCtx_loadDictionary()`. There are other
 * more advanced functions that allow selecting some options, see zstd.h for
 * complete documentation.
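 *
 * For illustration, here is a minimal sketch of a round trip with a dictionary
 * held in memory. It uses only the stable zstd.h API; the buffer names are
 * placeholders, and the ZSTD_isError() checks a real program needs are omitted
 * for brevity:
 *
 *     ZSTD_CCtx* cctx = ZSTD_createCCtx();
 *     ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 3);
 *     ZSTD_CCtx_loadDictionary(cctx, dictBuffer, dictSize);
 *     size_t cSize = ZSTD_compress2(cctx, cDst, cDstCapacity, src, srcSize);
 *     ZSTD_freeCCtx(cctx);
 *
 *     ZSTD_DCtx* dctx = ZSTD_createDCtx();
 *     ZSTD_DCtx_loadDictionary(dctx, dictBuffer, dictSize);  // same dictionary
 *     size_t rSize = ZSTD_decompressDCtx(dctx, rDst, rDstCapacity, cDst, cSize);
 *     ZSTD_freeDCtx(dctx);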
 *
 * What is a zstd dictionary?
 * --------------------------
 *
 * A zstd dictionary has two pieces: Its header, and its content. The header
 * contains a magic number, the dictionary ID, and entropy tables. These
 * entropy tables allow zstd to save on header costs in the compressed file,
 * which really matters for small data. The content is just bytes, which are
 * repeated content that is common across many samples.
 *
 * What is a raw content dictionary?
 * ---------------------------------
 *
 * A raw content dictionary is just bytes. It doesn't have a zstd dictionary
 * header, a dictionary ID, or entropy tables. Any buffer is a valid raw
 * content dictionary.
 *
 * How do I train a dictionary?
 * ----------------------------
 *
 * Gather samples from your use case. These samples should be similar to each
 * other. If you have several use cases, you could try to train one dictionary
 * per use case.
 *
 * Pass those samples to `ZDICT_trainFromBuffer()` and that will train your
 * dictionary. There are a few advanced versions of this function, but this
 * is a great starting point. If you want to further tune your dictionary
 * you could try `ZDICT_optimizeTrainFromBuffer_cover()`. If that is too slow
 * you can try `ZDICT_optimizeTrainFromBuffer_fastCover()`.
 *
 * If the dictionary training function fails, that is likely because you
 * either passed too few samples, or a dictionary would not be effective
 * for your data. Look at the messages that the dictionary trainer printed,
 * if it doesn't say too few samples, then a dictionary would not be effective.
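 *
 * As a sketch of the expected input layout (sample gathering and error
 * handling are left out; `samples`, `sampleSizes` and `nbSamples` are
 * placeholders for whatever your application collected):
 *
 *     // Concatenate the samples into one flat buffer, recording each size.
 *     size_t totalSize = 0, pos = 0;
 *     for (unsigned i = 0; i < nbSamples; i++) totalSize += sampleSizes[i];
 *     char* flat = (char*)malloc(totalSize);
 *     for (unsigned i = 0; i < nbSamples; i++) {
 *         memcpy(flat + pos, samples[i], sampleSizes[i]);
 *         pos += sampleSizes[i];
 *     }
 *     // Train a ~100 KB dictionary from the flat buffer.
 *     void*  dictBuffer = malloc(100 * 1024);
 *     size_t dictSize   = ZDICT_trainFromBuffer(dictBuffer, 100 * 1024,
 *                                               flat, sampleSizes, nbSamples);
 *     // dictSize is either the dictionary size or an error code:
 *     // check it with ZDICT_isError() before using the dictionary.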
 *
 * How large should my dictionary be?
 * ----------------------------------
 *
 * A reasonable dictionary size, the `dictBufferCapacity`, is about 100KB.
 * The zstd CLI defaults to a 110KB dictionary. You likely don't need a
 * dictionary larger than that. But, most use cases can get away with a
 * smaller dictionary. The advanced dictionary builders can automatically
 * shrink the dictionary for you, and select the smallest size that doesn't
 * hurt compression ratio too much. See the `shrinkDict` parameter.
 * A smaller dictionary can save memory, and potentially speed up
 * compression.
 *
 * How many samples should I provide to the dictionary builder?
 * ------------------------------------------------------------
 *
 * We generally recommend passing ~100x the size of the dictionary
 * in samples. A few thousand should suffice. Having too few samples
 * can hurt the dictionary's effectiveness. Having more samples will
 * only improve the dictionary's effectiveness. But having too many
 * samples can slow down the dictionary builder.
 *
 * How do I determine if a dictionary will be effective?
 * -----------------------------------------------------
 *
 * Simply train a dictionary and try it out. You can use zstd's built-in
 * benchmarking tool to test the dictionary effectiveness.
 *
 *   # Benchmark levels 1-3 without a dictionary
 *   zstd -b1e3 -r /path/to/my/files
 *   # Benchmark levels 1-3 with a dictionary
 *   zstd -b1e3 -r /path/to/my/files -D /path/to/my/dictionary
 *
 * When should I retrain a dictionary?
 * -----------------------------------
 *
 * You should retrain a dictionary when its effectiveness drops. Dictionary
 * effectiveness drops as the data you are compressing changes. Generally, we do
 * expect dictionaries to "decay" over time, as your data changes, but the rate
 * at which they decay depends on your use case. Internally, we regularly
 * retrain dictionaries, and if the new dictionary performs significantly
 * better than the old dictionary, we will ship the new dictionary.
 *
 * I have a raw content dictionary, how do I turn it into a zstd dictionary?
 * -------------------------------------------------------------------------
 *
 * If you have a raw content dictionary, e.g. by manually constructing it, or
 * using a third-party dictionary builder, you can turn it into a zstd
 * dictionary by using `ZDICT_finalizeDictionary()`. You'll also have to
 * provide some samples of the data. It will add the zstd header to the
 * raw content, which contains a dictionary ID and entropy tables, which
 * will improve compression ratio, and allow zstd to write the dictionary ID
 * into the frame, if you so choose.
 *
 * Do I have to use zstd's dictionary builder?
 * -------------------------------------------
 *
 * No! You can construct dictionary content however you please, it is just
 * bytes. It will always be valid as a raw content dictionary. If you want
 * a zstd dictionary, which can improve compression ratio, use
 * `ZDICT_finalizeDictionary()`.
 *
 * What is the attack surface of a zstd dictionary?
 * ------------------------------------------------
 *
 * Zstd is heavily fuzz tested, including loading fuzzed dictionaries, so
 * zstd should never crash, or access out-of-bounds memory no matter what
 * the dictionary is. However, if an attacker can control the dictionary
 * during decompression, they can cause zstd to generate arbitrary bytes,
 * just like if they controlled the compressed data.
 *
 ******************************************************************************/


/*! ZDICT_trainFromBuffer():
 *  Train a dictionary from an array of samples.
 *  Redirects towards ZDICT_optimizeTrainFromBuffer_fastCover() single-threaded, with d=8, steps=4,
 *  f=20, and accel=1.
 *  Samples must be stored concatenated in a single flat buffer `samplesBuffer`,
 *  supplied with an array of sizes `samplesSizes`, providing the size of each sample, in order.
 *  The resulting dictionary will be saved into `dictBuffer`.
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *  Note:  Dictionary training will fail if there are not enough samples to construct a
 *         dictionary, or if most of the samples are too small (< 8 bytes being the lower limit).
 *         If dictionary training fails, you should use zstd without a dictionary, as the dictionary
 *         would've been ineffective anyways. If you believe your samples would benefit from a dictionary
 *         please open an issue with details, and we can look into it.
 *  Note: ZDICT_trainFromBuffer()'s memory usage is about 6 MB.
 *  Tips: In general, a reasonable dictionary has a size of ~ 100 KB.
 *        It's possible to select a smaller or larger size, just by specifying `dictBufferCapacity`.
 *        In general, it's recommended to provide a few thousand samples, though this can vary a lot.
 *        It's recommended that the total size of all samples be about ~100x the target size of the dictionary.
 */
ZDICTLIB_API size_t ZDICT_trainFromBuffer(void* dictBuffer, size_t dictBufferCapacity,
                                          const void* samplesBuffer,
                                          const size_t* samplesSizes, unsigned nbSamples);
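
/* For illustration only: a hedged sketch of checking the training result and
 * persisting the dictionary to a file. The file name and surrounding buffers are
 * placeholders, not part of this API; a real program would also check fwrite().
 *
 *     size_t const dictSize = ZDICT_trainFromBuffer(dictBuffer, dictBufferCapacity,
 *                                                   samplesBuffer, samplesSizes, nbSamples);
 *     if (ZDICT_isError(dictSize)) {
 *         fprintf(stderr, "training failed: %s\n", ZDICT_getErrorName(dictSize));
 *     } else {
 *         FILE* const f = fopen("my_data.dict", "wb");  // placeholder file name
 *         fwrite(dictBuffer, 1, dictSize, f);
 *         fclose(f);
 *     }
 */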

typedef struct {
    int      compressionLevel;   /**< optimize for a specific zstd compression level; 0 means default */
    unsigned notificationLevel;  /**< Write log to stderr; 0 = none (default); 1 = errors; 2 = progression; 3 = details; 4 = debug; */
    unsigned dictID;             /**< force dictID value; 0 means auto mode (32-bits random value)
                                  *   NOTE: The zstd format reserves some dictionary IDs for future use.
                                  *         You may use them in private settings, but be warned that they
                                  *         may be used by zstd in a public dictionary registry in the future.
                                  *         These dictionary IDs are:
                                  *           - low range  : <= 32767
                                  *           - high range : >= (2^31)
                                  */
} ZDICT_params_t;

/*! ZDICT_finalizeDictionary():
 * Given custom content as a basis for a dictionary, and a set of samples,
 * finalize the dictionary by adding headers and statistics according to the zstd
 * dictionary format.
 *
 * Samples must be stored concatenated in a flat buffer `samplesBuffer`,
 * supplied with an array of sizes `samplesSizes`, providing the size of each
 * sample in order. The samples are used to construct the statistics, so they
 * should be representative of what you will compress with this dictionary.
 *
 * The compression level can be set in `parameters`. You should pass the
 * compression level you expect to use in production. The statistics for each
 * compression level differ, so tuning the dictionary for the compression level
 * can help quite a bit.
 *
 * You can set an explicit dictionary ID in `parameters`, or allow us to pick
 * a random dictionary ID for you, but we can't guarantee no collisions.
 *
 * The dstDictBuffer and the dictContent may overlap, and the content will be
 * appended to the end of the header. If the header + the content doesn't fit in
 * maxDictSize, the beginning of the content is truncated to make room, since it
 * is presumed that the most profitable content is at the end of the dictionary,
 * since that is the cheapest to reference.
 *
 * `maxDictSize` must be >= max(dictContentSize, ZDICT_DICTSIZE_MIN).
 *
 * @return: size of dictionary stored into `dstDictBuffer` (<= `maxDictSize`),
 *          or an error code, which can be tested by ZDICT_isError().
 * Note: ZDICT_finalizeDictionary() will push notifications into stderr if
 *       instructed to, using notificationLevel>0.
 * NOTE: This function currently may fail in several edge cases including:
 *         * Not enough samples
 *         * Samples are uncompressible
 *         * Samples are all exactly the same
 */
ZDICTLIB_API size_t ZDICT_finalizeDictionary(void* dstDictBuffer, size_t maxDictSize,
                                             const void* dictContent, size_t dictContentSize,
                                             const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples,
                                             ZDICT_params_t parameters);
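
/* For illustration only: a hedged sketch of turning a raw content dictionary into a
 * zstd dictionary. `rawDict`/`rawDictSize` stand for content produced by any means
 * (for instance a third-party builder); the samples should represent the data you
 * intend to compress. ZDICT_isError() checking is omitted for brevity.
 *
 *     ZDICT_params_t params;
 *     memset(&params, 0, sizeof(params));        // all-zero means "use defaults"
 *     params.compressionLevel = 3;               // tune stats for the production level
 *
 *     size_t const maxDictSize = rawDictSize + 1024;  // leave room for the header
 *     void*  const finalDict   = malloc(maxDictSize);
 *     size_t const finalSize   = ZDICT_finalizeDictionary(finalDict, maxDictSize,
 *                                                         rawDict, rawDictSize,
 *                                                         samplesBuffer, samplesSizes, nbSamples,
 *                                                         params);
 */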


/*======   Helper functions   ======*/
ZDICTLIB_API unsigned ZDICT_getDictID(const void* dictBuffer, size_t dictSize);  /**< extracts dictID; @return zero if error (not a valid dictionary) */
ZDICTLIB_API size_t ZDICT_getDictHeaderSize(const void* dictBuffer, size_t dictSize);  /* returns dict header size; returns a ZSTD error code on failure */
ZDICTLIB_API unsigned ZDICT_isError(size_t errorCode);
ZDICTLIB_API const char* ZDICT_getErrorName(size_t errorCode);
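
/* For illustration only: a hedged sketch of inspecting a dictionary blob with the
 * helpers above. Buffer names are placeholders; ZDICT_getDictID() returns 0 for raw
 * content dictionaries, since they carry no zstd dictionary header.
 *
 *     unsigned const id    = ZDICT_getDictID(dictBuffer, dictSize);
 *     size_t   const hSize = ZDICT_getDictHeaderSize(dictBuffer, dictSize);
 *     if (id == 0 || ZDICT_isError(hSize)) {
 *         // not a zstd-formatted dictionary (it may still be usable as raw content)
 *     } else {
 *         printf("dictID=%u, header=%zu bytes, content=%zu bytes\n",
 *                id, hSize, dictSize - hSize);
 *     }
 */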

#endif   /* ZSTD_ZDICT_H */

#if defined(ZDICT_STATIC_LINKING_ONLY) && !defined(ZSTD_ZDICT_H_STATIC)
#define ZSTD_ZDICT_H_STATIC

/* This can be overridden externally to hide static symbols. */
#ifndef ZDICTLIB_STATIC_API
#  if defined(ZSTD_DLL_EXPORT) && (ZSTD_DLL_EXPORT==1)
#    define ZDICTLIB_STATIC_API __declspec(dllexport) ZDICTLIB_VISIBLE
#  elif defined(ZSTD_DLL_IMPORT) && (ZSTD_DLL_IMPORT==1)
#    define ZDICTLIB_STATIC_API __declspec(dllimport) ZDICTLIB_VISIBLE
#  else
#    define ZDICTLIB_STATIC_API ZDICTLIB_VISIBLE
#  endif
#endif

/* ====================================================================================
 * The definitions in this section are considered experimental.
 * They should never be used with a dynamic library, as they may change in the future.
 * They are provided for advanced usages.
 * Use them only in association with static linking.
 * ==================================================================================== */

#define ZDICT_DICTSIZE_MIN    256
/* Deprecated: Remove in v1.6.0 */
#define ZDICT_CONTENTSIZE_MIN 128

/*! ZDICT_cover_params_t:
 *  k and d are the only required parameters.
 *  For others, value 0 means default.
 */
typedef struct {
    unsigned k;                  /* Segment size : constraint: 0 < k : Reasonable range [16, 2048+] */
    unsigned d;                  /* dmer size : constraint: 0 < d <= k : Reasonable range [6, 16] */
    unsigned steps;              /* Number of steps : Only used for optimization : 0 means default (40) : Higher means more parameters checked */
    unsigned nbThreads;          /* Number of threads : constraint: 0 < nbThreads : 1 means single-threaded : Only used for optimization : Ignored if ZSTD_MULTITHREAD is not defined */
    double splitPoint;           /* Percentage of samples used for training: Only used for optimization : the first nbSamples * splitPoint samples will be used for training, the last nbSamples * (1 - splitPoint) samples will be used for testing, 0 means default (1.0), 1.0 when all samples are used for both training and testing */
    unsigned shrinkDict;         /* Train dictionaries to shrink in size starting from the minimum size and select the smallest dictionary that is shrinkDictMaxRegression% worse than the largest dictionary. 0 means no shrinking and 1 means shrinking */
    unsigned shrinkDictMaxRegression; /* Sets shrinkDictMaxRegression so that a smaller dictionary can be at worst shrinkDictMaxRegression% worse than the max dict size dictionary. */
    ZDICT_params_t zParams;
} ZDICT_cover_params_t;

typedef struct {
    unsigned k;                  /* Segment size : constraint: 0 < k : Reasonable range [16, 2048+] */
    unsigned d;                  /* dmer size : constraint: 0 < d <= k : Reasonable range [6, 16] */
    unsigned f;                  /* log of size of frequency array : constraint: 0 < f <= 31 : 0 means default (20) */
    unsigned steps;              /* Number of steps : Only used for optimization : 0 means default (40) : Higher means more parameters checked */
    unsigned nbThreads;          /* Number of threads : constraint: 0 < nbThreads : 1 means single-threaded : Only used for optimization : Ignored if ZSTD_MULTITHREAD is not defined */
    double splitPoint;           /* Percentage of samples used for training: Only used for optimization : the first nbSamples * splitPoint samples will be used for training, the last nbSamples * (1 - splitPoint) samples will be used for testing, 0 means default (0.75), 1.0 when all samples are used for both training and testing */
    unsigned accel;              /* Acceleration level: constraint: 0 < accel <= 10, higher means faster and less accurate, 0 means default (1) */
    unsigned shrinkDict;         /* Train dictionaries to shrink in size starting from the minimum size and select the smallest dictionary that is shrinkDictMaxRegression% worse than the largest dictionary. 0 means no shrinking and 1 means shrinking */
    unsigned shrinkDictMaxRegression; /* Sets shrinkDictMaxRegression so that a smaller dictionary can be at worst shrinkDictMaxRegression% worse than the max dict size dictionary. */

    ZDICT_params_t zParams;
} ZDICT_fastCover_params_t;

/*! ZDICT_trainFromBuffer_cover():
 *  Train a dictionary from an array of samples using the COVER algorithm.
 *  Samples must be stored concatenated in a single flat buffer `samplesBuffer`,
 *  supplied with an array of sizes `samplesSizes`, providing the size of each sample, in order.
 *  The resulting dictionary will be saved into `dictBuffer`.
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *          See ZDICT_trainFromBuffer() for details on failure modes.
 *  Note: ZDICT_trainFromBuffer_cover() requires about 9 bytes of memory for each input byte.
 *  Tips: In general, a reasonable dictionary has a size of ~ 100 KB.
 *        It's possible to select a smaller or larger size, just by specifying `dictBufferCapacity`.
 *        In general, it's recommended to provide a few thousand samples, though this can vary a lot.
 *        It's recommended that the total size of all samples be about ~100x the target size of the dictionary.
 */
ZDICTLIB_STATIC_API size_t ZDICT_trainFromBuffer_cover(
          void *dictBuffer, size_t dictBufferCapacity,
    const void *samplesBuffer, const size_t *samplesSizes, unsigned nbSamples,
          ZDICT_cover_params_t parameters);
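
/* For illustration only: a hedged sketch of training with explicit COVER parameters.
 * The k and d values below are arbitrary examples within the documented ranges, not
 * recommendations; error handling is omitted.
 *
 *     ZDICT_cover_params_t params;
 *     memset(&params, 0, sizeof(params));  // zero all optional fields
 *     params.k = 1024;                     // segment size (required)
 *     params.d = 8;                        // dmer size (required), d <= k
 *     params.zParams.compressionLevel = 3;
 *
 *     size_t const dictSize = ZDICT_trainFromBuffer_cover(dictBuffer, dictBufferCapacity,
 *                                                         samplesBuffer, samplesSizes, nbSamples,
 *                                                         params);
 */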

/*! ZDICT_optimizeTrainFromBuffer_cover():
 * The same requirements as above hold for all the parameters except `parameters`.
 * This function tries many parameter combinations and picks the best parameters.
 * `*parameters` is filled with the best parameters found,
 * and the dictionary constructed with those parameters is stored in `dictBuffer`.
 *
 * All of the parameters d, k, steps are optional.
 * If d is non-zero then we don't check multiple values of d, otherwise we check d = {6, 8}.
 * If steps is zero, it defaults to 40.
 * If k is non-zero then we don't check multiple values of k, otherwise we check `steps` values of k in [50, 2000].
 *
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *          On success `*parameters` contains the parameters selected.
 *          See ZDICT_trainFromBuffer() for details on failure modes.
 * Note: ZDICT_optimizeTrainFromBuffer_cover() requires about 8 bytes of memory for each input byte,
 *       plus an additional 5 bytes of memory per input byte for each thread.
 */
ZDICTLIB_STATIC_API size_t ZDICT_optimizeTrainFromBuffer_cover(
          void* dictBuffer, size_t dictBufferCapacity,
    const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples,
          ZDICT_cover_params_t* parameters);
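
/* For illustration only: a hedged sketch of letting the optimizer search k and d.
 * Leaving k, d and steps at 0 requests the default search; nbThreads is only honored
 * when the library is built with ZSTD_MULTITHREAD. Error handling is omitted.
 *
 *     ZDICT_cover_params_t params;
 *     memset(&params, 0, sizeof(params));  // k = d = steps = 0 => search defaults
 *     params.nbThreads  = 4;               // arbitrary example value
 *     params.shrinkDict = 1;               // also look for a smaller dictionary
 *
 *     size_t const dictSize = ZDICT_optimizeTrainFromBuffer_cover(dictBuffer, dictBufferCapacity,
 *                                                                 samplesBuffer, samplesSizes, nbSamples,
 *                                                                 &params);
 *     // On success, params holds the k, d, etc. that were selected.
 */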

/*! ZDICT_trainFromBuffer_fastCover():
 *  Train a dictionary from an array of samples using a modified version of the COVER algorithm.
 *  Samples must be stored concatenated in a single flat buffer `samplesBuffer`,
 *  supplied with an array of sizes `samplesSizes`, providing the size of each sample, in order.
 *  d and k are required.
 *  All other parameters are optional, and will use default values if not provided.
 *  The resulting dictionary will be saved into `dictBuffer`.
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *          See ZDICT_trainFromBuffer() for details on failure modes.
 *  Note: ZDICT_trainFromBuffer_fastCover() requires 6 * 2^f bytes of memory.
 *  Tips: In general, a reasonable dictionary has a size of ~ 100 KB.
 *        It's possible to select a smaller or larger size, just by specifying `dictBufferCapacity`.
 *        In general, it's recommended to provide a few thousand samples, though this can vary a lot.
 *        It's recommended that the total size of all samples be about ~100x the target size of the dictionary.
 */
ZDICTLIB_STATIC_API size_t ZDICT_trainFromBuffer_fastCover(void *dictBuffer,
                    size_t dictBufferCapacity, const void *samplesBuffer,
                    const size_t *samplesSizes, unsigned nbSamples,
                    ZDICT_fastCover_params_t parameters);

/*! ZDICT_optimizeTrainFromBuffer_fastCover():
 * The same requirements as above hold for all the parameters except `parameters`.
 * This function tries many parameter combinations (specifically, k and d combinations)
 * and picks the best parameters. `*parameters` is filled with the best parameters found,
 * and the dictionary constructed with those parameters is stored in `dictBuffer`.
 * All of the parameters d, k, steps, f, and accel are optional.
 * If d is non-zero then we don't check multiple values of d, otherwise we check d = {6, 8}.
 * If steps is zero, it defaults to 40.
 * If k is non-zero then we don't check multiple values of k, otherwise we check `steps` values of k in [50, 2000].
 * If f is zero, the default value of 20 is used.
 * If accel is zero, the default value of 1 is used.
 *
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *          On success `*parameters` contains the parameters selected.
 *          See ZDICT_trainFromBuffer() for details on failure modes.
 * Note: ZDICT_optimizeTrainFromBuffer_fastCover() requires about 6 * 2^f bytes of memory for each thread.
 */
ZDICTLIB_STATIC_API size_t ZDICT_optimizeTrainFromBuffer_fastCover(void* dictBuffer,
                    size_t dictBufferCapacity, const void* samplesBuffer,
                    const size_t* samplesSizes, unsigned nbSamples,
                    ZDICT_fastCover_params_t* parameters);
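
/* For illustration only: a hedged sketch of the fastCover optimizer, which trades some
 * accuracy for speed. f and accel are left at 0 to get their defaults (20 and 1); the
 * splitPoint shown is just an example value. Error handling is omitted.
 *
 *     ZDICT_fastCover_params_t params;
 *     memset(&params, 0, sizeof(params));
 *     params.splitPoint = 0.9;             // train on 90% of samples, test on the rest
 *     params.zParams.compressionLevel = 3;
 *
 *     size_t const dictSize = ZDICT_optimizeTrainFromBuffer_fastCover(dictBuffer, dictBufferCapacity,
 *                                                                     samplesBuffer, samplesSizes, nbSamples,
 *                                                                     &params);
 */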

typedef struct {
    unsigned selectivityLevel;   /* 0 means default; larger => select more => larger dictionary */
    ZDICT_params_t zParams;
} ZDICT_legacy_params_t;

/*! ZDICT_trainFromBuffer_legacy():
 *  Train a dictionary from an array of samples.
 *  Samples must be stored concatenated in a single flat buffer `samplesBuffer`,
 *  supplied with an array of sizes `samplesSizes`, providing the size of each sample, in order.
 *  The resulting dictionary will be saved into `dictBuffer`.
 * `parameters` is optional and can be provided with values set to 0 to mean "default".
 * @return: size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`)
 *          or an error code, which can be tested with ZDICT_isError().
 *          See ZDICT_trainFromBuffer() for details on failure modes.
 *  Tips: In general, a reasonable dictionary has a size of ~ 100 KB.
 *        It's possible to select a smaller or larger size, just by specifying `dictBufferCapacity`.
 *        In general, it's recommended to provide a few thousand samples, though this can vary a lot.
 *        It's recommended that the total size of all samples be about ~100x the target size of the dictionary.
 *  Note: ZDICT_trainFromBuffer_legacy() will send notifications into stderr if instructed to, using notificationLevel>0.
 */
ZDICTLIB_STATIC_API size_t ZDICT_trainFromBuffer_legacy(
    void* dictBuffer, size_t dictBufferCapacity,
    const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples,
    ZDICT_legacy_params_t parameters);


/* Deprecation warnings */
/* It is generally possible to disable deprecation warnings from the compiler,
   for example with -Wno-deprecated-declarations for gcc
   or _CRT_SECURE_NO_WARNINGS in Visual.
   Otherwise, it's also possible to manually define ZDICT_DISABLE_DEPRECATE_WARNINGS. */
#ifdef ZDICT_DISABLE_DEPRECATE_WARNINGS
#  define ZDICT_DEPRECATED(message)   /* disable deprecation warnings */
#else
#  define ZDICT_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
#  if defined (__cplusplus) && (__cplusplus >= 201402)    /* C++14 or greater */
#    define ZDICT_DEPRECATED(message) [[deprecated(message)]]
#  elif defined(__clang__) || (ZDICT_GCC_VERSION >= 405)
#    define ZDICT_DEPRECATED(message) __attribute__((deprecated(message)))
#  elif (ZDICT_GCC_VERSION >= 301)
#    define ZDICT_DEPRECATED(message) __attribute__((deprecated))
#  elif defined(_MSC_VER)
#    define ZDICT_DEPRECATED(message) __declspec(deprecated(message))
#  else
#    pragma message("WARNING: You need to implement ZDICT_DEPRECATED for this compiler")
#    define ZDICT_DEPRECATED(message)
#  endif
#endif /* ZDICT_DISABLE_DEPRECATE_WARNINGS */

ZDICT_DEPRECATED("use ZDICT_finalizeDictionary() instead")
ZDICTLIB_STATIC_API
size_t ZDICT_addEntropyTablesFromBuffer(void* dictBuffer, size_t dictContentSize, size_t dictBufferCapacity,
                                        const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples);


#endif   /* ZSTD_ZDICT_H_STATIC */

#if defined (__cplusplus)
}
#endif