#!/usr/bin/python
"""I'd like to be able to do word-game stuff in the browser.

But my wordlist is largish (1.3MB).  But you can probably do some
reasonable things with just a prefix search.  You can divide the
wordlist into files with a common prefix...

How many characters of prefix do you have to bucket the wordlist by to
get adequately small files?
"""
import json
import os
import sys


def get_wordlist():
    """Yield each word from the 'wordlist' file in the current directory.

    Each line is expected to look like "<freq> <word>"; the frequency
    is discarded and only the word is yielded.
    """
    # 'with' guarantees the handle is closed; the original used the
    # Python-2-only file() builtin and leaked the handle.
    with open('wordlist') as infile:
        for line in infile:
            freq, word = line.split()
            yield word


def bucket_wordlist_by_prefix(length):
    """Group the wordlist into {prefix: [words]} keyed by the first
    `length` characters of each word (length 0 puts everything under '')."""
    buckets = {}
    for word in get_wordlist():
        # setdefault avoids the separate membership test + second lookup
        # the original performed for every word.
        buckets.setdefault(word[:length], []).append(word)
    return buckets


def max_json_size_by_prefix(length):
    """Return the byte length of the largest bucket once JSON-encoded."""
    buckets = bucket_wordlist_by_prefix(length)
    return max(len(json.dumps(words)) for words in buckets.values())


def download_time(size, kbps):
    """Describe how long `size` bytes take to transfer at `kbps` kilobits/s."""
    seconds = float(size * 8) / (1000 * kbps)
    return "download time at %s kbps is %.3fs" % (kbps, seconds)


def evaluate():
    """Print the worst-case bucket size (and 56kbps download time)
    for each prefix length 0..9."""
    for length in range(10):
        size = max_json_size_by_prefix(length)
        dtime = download_time(size, kbps=56)
        print("max JSON size for prefix", length, "is", size, ';', dtime)


def create(filename):
    """Open `filename` for writing, creating parent directories as needed."""
    dirname = os.path.dirname(filename)
    if dirname:
        # exist_ok=True removes the check-then-create race the original
        # had with its os.path.exists() guard.
        os.makedirs(dirname, exist_ok=True)
    return open(filename, 'w')


def make_json_files(prefix_length):
    """Write each bucket to jsonwords/<half-prefix>/<prefix>.json.

    The half-length subdirectory keeps any single directory from
    accumulating too many files.
    """
    buckets = bucket_wordlist_by_prefix(prefix_length)
    half_prefix_length = (prefix_length + 1) // 2
    for prefix, words in buckets.items():
        path = 'jsonwords/%s/%s.json' % (prefix[:half_prefix_length], prefix)
        # 'with' ensures the file is flushed and closed even on error.
        with create(path) as outfile:
            json.dump(words, outfile)


def main():
    """CLI entry point: 'evaluate' prints sizing stats; an integer
    argument writes the JSON bucket files for that prefix length."""
    if sys.argv[1] == 'evaluate':
        evaluate()
    else:
        make_json_files(int(sys.argv[1]))


if __name__ == '__main__':
    main()
