- /*************************************************************************/
- /* compressed_translation.cpp */
- /*************************************************************************/
- /* This file is part of: */
- /* GODOT ENGINE */
- /* https://godotengine.org */
- /*************************************************************************/
- /* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
- /* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
- /* */
- /* Permission is hereby granted, free of charge, to any person obtaining */
- /* a copy of this software and associated documentation files (the */
- /* "Software"), to deal in the Software without restriction, including */
- /* without limitation the rights to use, copy, modify, merge, publish, */
- /* distribute, sublicense, and/or sell copies of the Software, and to */
- /* permit persons to whom the Software is furnished to do so, subject to */
- /* the following conditions: */
- /* */
- /* The above copyright notice and this permission notice shall be */
- /* included in all copies or substantial portions of the Software. */
- /* */
- /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
- /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
- /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
- /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
- /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
- /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
- /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
- /*************************************************************************/
- #include "compressed_translation.h"
- #include "pair.h"
- extern "C" {
- #include "thirdparty/misc/smaz.h"
- }
- struct _PHashTranslationCmp {
- int orig_len;
- CharString compressed;
- int offset;
- };
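- // Builds the perfect-hash lookup tables from a regular Translation resource.
- // The generator is compiled only when TOOLS_ENABLED is defined; at runtime the
- // pre-generated tables are simply loaded back through _set().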
- void PHashTranslation::generate(const Ref<Translation> &p_from) {
- #ifdef TOOLS_ENABLED
- List<StringName> keys;
- p_from->get_message_list(&keys);
- int size = Math::larger_prime(keys.size());
- print_line("compressing keys: " + itos(keys.size()));
- Vector<Vector<Pair<int, CharString> > > buckets;
- Vector<Map<uint32_t, int> > table;
- Vector<uint32_t> hfunc_table;
- Vector<_PHashTranslationCmp> compressed;
- table.resize(size);
- hfunc_table.resize(size);
- buckets.resize(size);
- compressed.resize(keys.size());
- int idx = 0;
- int total_compression_size = 0;
- int total_string_size = 0;
- for (List<StringName>::Element *E = keys.front(); E; E = E->next()) {
- //hash string
- CharString cs = E->get().operator String().utf8();
- uint32_t h = hash(0, cs.get_data());
- Pair<int, CharString> p;
- p.first = idx;
- p.second = cs;
- buckets[h % size].push_back(p);
- //compress string
- CharString src_s = p_from->get_message(E->get()).operator String().utf8();
- _PHashTranslationCmp ps;
- ps.orig_len = src_s.size();
- ps.offset = total_compression_size;
- if (ps.orig_len != 0) {
- CharString dst_s;
- dst_s.resize(src_s.size());
- int ret = smaz_compress(src_s.get_data(), src_s.size(), &dst_s[0], src_s.size());
- if (ret >= src_s.size()) {
- //if compressed is larger than original, just use original
- ps.orig_len = src_s.size();
- ps.compressed = src_s;
- } else {
- dst_s.resize(ret);
- //ps.orig_len=;
- ps.compressed = dst_s;
- }
- } else {
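- // Empty messages are stored as a single NUL byte so every entry has a payload.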
- ps.orig_len = 1;
- ps.compressed.resize(1);
- ps.compressed[0] = 0;
- }
- compressed[idx] = ps;
- total_compression_size += ps.compressed.size();
- total_string_size += src_s.size();
- idx++;
- }
- int bucket_table_size = 0;
- print_line("total compressed string size: " + itos(total_compression_size) + " (" + itos(total_string_size) + " uncompressed).");
- for (int i = 0; i < size; i++) {
- Vector<Pair<int, CharString> > &b = buckets[i];
- Map<uint32_t, int> &t = table[i];
- if (b.size() == 0)
- continue;
- //print_line("bucket: "+itos(i)+" - elements: "+itos(b.size()));
- int d = 1;
- int item = 0;
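- // Search for a seed d that gives every key in this bucket a distinct secondary
- // slot; any collision clears the partial assignment and retries with d + 1.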
- while (item < b.size()) {
- uint32_t slot = hash(d, b[item].second.get_data());
- if (t.has(slot)) {
- item = 0;
- d++;
- t.clear();
- } else {
- t[slot] = b[item].first;
- item++;
- }
- }
- hfunc_table[i] = d;
- bucket_table_size += 2 + b.size() * 4;
- }
- print_line("bucket table size: " + itos(bucket_table_size * 4));
- print_line("hash table size: " + itos(size * 4));
- hash_table.resize(size);
- bucket_table.resize(bucket_table_size);
- DVector<int>::Write htwb = hash_table.write();
- DVector<int>::Write btwb = bucket_table.write();
- uint32_t *htw = (uint32_t *)&htwb[0];
- uint32_t *btw = (uint32_t *)&btwb[0];
- int btindex = 0;
- int collisions = 0;
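- // Serialize: hash_table[i] holds an index into bucket_table (0xFFFFFFFF if the
- // slot is empty); each bucket record is [count, seed, then per element:
- // key hash, string offset, compressed size, original length].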
- for (int i = 0; i < size; i++) {
- Map<uint32_t, int> &t = table[i];
- if (t.size() == 0) {
- htw[i] = 0xFFFFFFFF; //nothing
- continue;
- } else if (t.size() > 1) {
- collisions += t.size() - 1;
- }
- htw[i] = btindex;
- btw[btindex++] = t.size();
- btw[btindex++] = hfunc_table[i];
- for (Map<uint32_t, int>::Element *E = t.front(); E; E = E->next()) {
- btw[btindex++] = E->key();
- btw[btindex++] = compressed[E->get()].offset;
- btw[btindex++] = compressed[E->get()].compressed.size();
- btw[btindex++] = compressed[E->get()].orig_len;
- }
- }
- print_line("total collisions: " + itos(collisions));
- strings.resize(total_compression_size);
- DVector<uint8_t>::Write cw = strings.write();
- for (int i = 0; i < compressed.size(); i++) {
- memcpy(&cw[compressed[i].offset], compressed[i].compressed.get_data(), compressed[i].compressed.size());
- }
- ERR_FAIL_COND(btindex != bucket_table_size);
- set_locale(p_from->get_locale());
- #endif
- }
- bool PHashTranslation::_set(const StringName &p_name, const Variant &p_value) {
- String name = p_name.operator String();
- if (name == "hash_table") {
- hash_table = p_value;
- //print_line("translation: loaded hash table of size: "+itos(hash_table.size()));
- } else if (name == "bucket_table") {
- bucket_table = p_value;
- //print_line("translation: loaded bucket table of size: "+itos(bucket_table.size()));
- } else if (name == "strings") {
- strings = p_value;
- //print_line("translation: loaded string table of size: "+itos(strings.size()));
- } else if (name == "load_from") {
- //print_line("generating");
- generate(p_value);
- } else
- return false;
- return true;
- }
- bool PHashTranslation::_get(const StringName &p_name, Variant &r_ret) const {
- String name = p_name.operator String();
- if (name == "hash_table")
- r_ret = hash_table;
- else if (name == "bucket_table")
- r_ret = bucket_table;
- else if (name == "strings")
- r_ret = strings;
- else
- return false;
- return true;
- }
- StringName PHashTranslation::get_message(const StringName &p_src_text) const {
- int htsize = hash_table.size();
- if (htsize == 0)
- return StringName();
- CharString str = p_src_text.operator String().utf8();
- uint32_t h = hash(0, str.get_data());
- DVector<int>::Read htr = hash_table.read();
- const uint32_t *htptr = (const uint32_t *)&htr[0];
- DVector<int>::Read btr = bucket_table.read();
- const uint32_t *btptr = (const uint32_t *)&btr[0];
- DVector<uint8_t>::Read sr = strings.read();
- const char *sptr = (const char *)&sr[0];
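- // The primary hash picks the bucket; an entry of 0xFFFFFFFF means no bucket
- // exists for this slot, so the message is not in the table.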
- uint32_t p = htptr[h % htsize];
- //print_line("String: "+p_src_text.operator String());
- //print_line("Hash: "+itos(p));
- if (p == 0xFFFFFFFF) {
- // print_line("GETMSG: Nothing!");
- return StringName(); //nothing
- }
- const Bucket &bucket = *(const Bucket *)&btptr[p];
- h = hash(bucket.func, str.get_data());
- int idx = -1;
- for (int i = 0; i < bucket.size; i++) {
- if (bucket.elem[i].key == h) {
- idx = i;
- break;
- }
- }
- //print_line("bucket pos: "+itos(idx));
- if (idx == -1) {
- // print_line("GETMSG: Not in Bucket!");
- return StringName();
- }
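- // Equal compressed and uncompressed sizes mean the payload was stored verbatim;
- // otherwise it is smaz-compressed and needs decompressing first.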
- if (bucket.elem[idx].comp_size == bucket.elem[idx].uncomp_size) {
- String rstr;
- rstr.parse_utf8(&sptr[bucket.elem[idx].str_offset], bucket.elem[idx].uncomp_size);
- // print_line("Uncompressed, size: "+itos(bucket.elem[idx].comp_size));
- // print_line("Return: "+rstr);
- return rstr;
- } else {
- CharString uncomp;
- uncomp.resize(bucket.elem[idx].uncomp_size + 1);
- smaz_decompress(&sptr[bucket.elem[idx].str_offset], bucket.elem[idx].comp_size, uncomp.ptr(), bucket.elem[idx].uncomp_size);
- String rstr;
- rstr.parse_utf8(uncomp.get_data());
- // print_line("Compressed, size: "+itos(bucket.elem[idx].comp_size));
- // print_line("Return: "+rstr);
- return rstr;
- }
- }
- void PHashTranslation::_get_property_list(List<PropertyInfo> *p_list) const {
- p_list->push_back(PropertyInfo(Variant::INT_ARRAY, "hash_table"));
- p_list->push_back(PropertyInfo(Variant::INT_ARRAY, "bucket_table"));
- p_list->push_back(PropertyInfo(Variant::RAW_ARRAY, "strings"));
- p_list->push_back(PropertyInfo(Variant::OBJECT, "load_from", PROPERTY_HINT_RESOURCE_TYPE, "Translation", PROPERTY_USAGE_EDITOR));
- }
- void PHashTranslation::_bind_methods() {
- ObjectTypeDB::bind_method(_MD("generate", "from:Translation"), &PHashTranslation::generate);
- }
- PHashTranslation::PHashTranslation() {
- }