1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
|
/* -*- c-basic-offset: 4; indent-tabs-mode: nil -*- */
/* ====================================================================
* Copyright (c) 1999-2007 Carnegie Mellon University. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* This work was supported in part by funding from the Defense Advanced
* Research Projects Agency and the National Science Foundation of the
* United States of America, and the CMU Sphinx Speech Consortium.
*
* THIS SOFTWARE IS PROVIDED BY CARNEGIE MELLON UNIVERSITY ``AS IS'' AND
* ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY
* NOR ITS EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ====================================================================
*
*/
/*
* \file ngram_model_internal.h Internal structures for N-Gram models
*
* Author: David Huggins-Daines <dhuggins@cs.cmu.edu>
*/
#ifndef __NGRAM_MODEL_INTERNAL_H__
#define __NGRAM_MODEL_INTERNAL_H__
#include "sphinxbase/ngram_model.h"
#include "sphinxbase/hash_table.h"
/**
 * Common implementation of ngram_model_t.
 *
 * This is the base object shared by all concrete N-gram model backends
 * (ARPA text, DMP binary, ...).  The details of bigram, trigram, and
 * higher-order N-gram storage, if any, can vary somewhat depending on
 * the file format in use; backend-specific behavior is dispatched
 * through the #funcs vtable.
 */
struct ngram_model_s {
    int refcount;       /**< Reference count */
    int32 *n_counts;    /**< Counts for 1, 2, 3, ... grams (array of length #n) */
    int32 n_1g_alloc;   /**< Number of allocated word strings (for new word addition) */
    int32 n_words;      /**< Number of actual word strings (NOT the same as the
                             number of unigrams, due to class words). */
    uint8 n;            /**< This is an n-gram model (1, 2, 3, ...). */
    uint8 n_classes;    /**< Number of classes (maximum 128, since the class ID
                             is packed into 7 bits of the word ID — see
                             NGRAM_CLASSID()) */
    uint8 writable;     /**< Are word strings writable (i.e. owned by us)? */
    uint8 flags;        /**< Any other flags we might care about
                             (FIXME: Merge this and writable) */
    logmath_t *lmath;   /**< Log-math object used for all score arithmetic */
    float32 lw;         /**< Language model scaling factor */
    int32 log_wip;      /**< Log of word insertion penalty */
    int32 log_uw;       /**< Log of unigram weight */
    int32 log_uniform;  /**< Log of uniform (0-gram) probability */
    int32 log_uniform_weight; /**< Log of uniform weight (i.e. 1 - unigram weight) */
    int32 log_zero;     /**< Zero probability, cached here for quick lookup */
    char **word_str;    /**< Unigram names, indexed by word ID */
    hash_table_t *wid;  /**< Mapping of unigram names to word IDs. */
    int32 *tmp_wids;    /**< Temporary array of word IDs for ngram_model_get_ngram() */
    struct ngram_class_s **classes; /**< Word class definitions (n_classes entries). */
    struct ngram_funcs_s *funcs;    /**< Implementation-specific methods. */
};
/**
 * Implementation of ngram_class_t.
 *
 * A word class maps a single class tag (one unigram in the LM) to a set
 * of member words, each with its own in-class probability.  Members whose
 * base word IDs form a contiguous range starting at #start_wid live in
 * the dense #prob1 table; members added later (out of range) go into the
 * open-addressed #nword_hash table.
 */
struct ngram_class_s {
    int32 tag_wid;   /**< Base word ID for this class tag */
    int32 start_wid; /**< Starting base word ID for this class' words */
    int32 n_words;   /**< Number of base words for this class (size of prob1) */
    int32 *prob1;    /**< Probability table for base words
                          (presumably log-domain, like other scores here —
                          NOTE(review): confirm against ngram_class_prob()) */
    /**
     * Custom hash table for additional words.
     *
     * Collision resolution is by chaining through bucket indices
     * (the #next field), not by pointers.
     */
    struct ngram_hash_s {
        int32 wid;   /**< Word ID of this bucket */
        int32 prob1; /**< Probability for this word */
        int32 next;  /**< Index of next bucket (or -1 for no collision) */
    } *nword_hash;
    int32 n_hash;       /**< Number of buckets in nword_hash (power of 2) */
    int32 n_hash_inuse; /**< Number of words in nword_hash */
};
/** Initial number of buckets in an ngram_class_s::nword_hash table (power of 2). */
#define NGRAM_HASH_SIZE 128

/*
 * Class-tagged word ID encoding: bit 31 flags a class word, bits 24-30
 * carry the class ID (hence the 128-class limit), and bits 0-23 carry
 * the base word ID.
 */
/** Extract the base word ID (low 24 bits) from a possibly class-tagged word ID. */
#define NGRAM_BASEWID(wid) ((wid)&0xffffff)
/** Extract the class ID (bits 24-30) from a class-tagged word ID. */
#define NGRAM_CLASSID(wid) (((wid)>>24) & 0x7f)
/** Build a class-tagged word ID from a base word ID and a class ID. */
#define NGRAM_CLASSWID(wid,classid) (((classid)<<24) | 0x80000000 | (wid))
/** Non-zero iff the word ID has the class-word flag (bit 31) set. */
#define NGRAM_IS_CLASSWID(wid) ((wid)&0x80000000)

/** Number of unigram slots to grow by when new words are added. */
#define UG_ALLOC_STEP 10
/** Implementation-specific functions for operating on ngram_model_t objects.
 *
 * This vtable is how the generic ngram_model code dispatches to a concrete
 * backend (ARPA, DMP, ...).  Pointers may be NULL where a backend does not
 * support the operation — NOTE(review): confirm against the call sites.
 */
typedef struct ngram_funcs_s {
    /**
     * Implementation-specific function for freeing an ngram_model_t.
     */
    void (*free)(ngram_model_t *model);
    /**
     * Implementation-specific function for applying language model weights
     * (language weight, word insertion penalty, unigram weight).
     */
    int (*apply_weights)(ngram_model_t *model,
                         float32 lw,
                         float32 wip,
                         float32 uw);
    /**
     * Implementation-specific function for querying language model score
     * (i.e. with lw/wip/uw applied).
     */
    int32 (*score)(ngram_model_t *model,
                   int32 wid,
                   int32 *history,
                   int32 n_hist,
                   int32 *n_used);
    /**
     * Implementation-specific function for querying raw language
     * model probability (i.e. without weights applied).
     */
    int32 (*raw_score)(ngram_model_t *model,
                       int32 wid,
                       int32 *history,
                       int32 n_hist,
                       int32 *n_used);
    /**
     * Implementation-specific function for adding unigrams.
     *
     * This function updates the internal structures of a language
     * model to add the given unigram with the given weight (defined
     * as a log-factor applied to the uniform distribution).  This
     * includes reallocating or otherwise resizing the set of unigrams.
     *
     * @return The language model score (not raw log-probability) of
     * the new word, or 0 for failure.
     */
    int32 (*add_ug)(ngram_model_t *model,
                    int32 wid, int32 lweight);
    /**
     * Implementation-specific function for purging N-Gram cache
     */
    void (*flush)(ngram_model_t *model);
    /**
     * Create an iterator positioned at a specific N-gram
     * (word plus history).
     */
    ngram_iter_t * (*iter)(ngram_model_t *model, int32 wid, int32 *history, int32 n_hist);
    /**
     * Create an iterator over all M-grams of a given order.
     */
    ngram_iter_t * (*mgrams)(ngram_model_t *model, int32 m);
    /**
     * Create an iterator over the successors (extensions by one word)
     * of the current iterator position.
     */
    ngram_iter_t * (*successors)(ngram_iter_t *itor);
    /**
     * Get the word IDs, score, and backoff weight at the current
     * iterator position.
     */
    int32 const * (*iter_get)(ngram_iter_t *itor,
                              int32 *out_score,
                              int32 *out_bowt);
    /**
     * Advance the iterator to the next N-gram.
     */
    ngram_iter_t * (*iter_next)(ngram_iter_t *itor);
    /**
     * Release an iterator and any backend resources it holds.
     */
    void (*iter_free)(ngram_iter_t *itor);
} ngram_funcs_t;
/**
 * Base iterator structure for N-grams.
 *
 * Concrete backends embed this at the start of their own iterator
 * structures; the #model->funcs vtable operates on it.
 */
struct ngram_iter_s {
    ngram_model_t *model; /**< Model this iterator walks over. */
    int32 *wids;          /**< Scratch space for word IDs. */
    int16 m;              /**< Order of history. */
    int16 successor;      /**< Is this a successor iterator? */
};
/**
 * One class definition from a classdef file.
 *
 * Parallel arrays: words[i] has in-class weight weights[i],
 * for 0 <= i < n_words.
 */
typedef struct classdef_s {
    char **words;     /**< Member word strings. */
    float32 *weights; /**< Per-word in-class weights. */
    int32 n_words;    /**< Number of entries in words/weights. */
} classdef_t;
/**
 * Initialize the base ngram_model_t structure.
 *
 * Called by backend constructors after allocating their concrete
 * structure, to set up the fields shared by all implementations.
 */
int32
ngram_model_init(ngram_model_t *model,
                 ngram_funcs_t *funcs,
                 logmath_t *lmath,
                 int32 n, int32 n_unigram);

/**
 * Read an N-Gram model from an ARPABO text file.
 */
ngram_model_t *ngram_model_arpa_read(cmd_ln_t *config,
				     const char *file_name,
				     logmath_t *lmath);
/**
 * Read an N-Gram model from a Sphinx .DMP binary file.
 */
ngram_model_t *ngram_model_dmp_read(cmd_ln_t *config,
				    const char *file_name,
				    logmath_t *lmath);
/**
 * Read an N-Gram model from a Sphinx .DMP32 binary file.
 */
ngram_model_t *ngram_model_dmp32_read(cmd_ln_t *config,
				      const char *file_name,
				      logmath_t *lmath);

/**
 * Write an N-Gram model to an ARPABO text file.
 */
int ngram_model_arpa_write(ngram_model_t *model,
			   const char *file_name);
/**
 * Write an N-Gram model to a Sphinx .DMP binary file.
 */
int ngram_model_dmp_write(ngram_model_t *model,
			  const char *file_name);

/**
 * Read a class definition (classdef/probdef) file into a hash table
 * mapping class names to classdef_t objects.
 */
int32 read_classdef_file(hash_table_t *classes, const char *classdef_file);
/**
 * Free a class definition (its word strings, weights, and the
 * classdef_t itself).
 */
void classdef_free(classdef_t *classdef);

/**
 * Allocate and initialize an N-Gram class.
 */
ngram_class_t *ngram_class_new(ngram_model_t *model, int32 tag_wid,
                               int32 start_wid, glist_t classwords);

/**
 * Deallocate an N-Gram class.
 */
void ngram_class_free(ngram_class_t *lmclass);

/**
 * Get the in-class log probability for a word in an N-Gram class.
 *
 * @return This probability, or 1 if word not found.  (1 is usable as a
 * sentinel here because it is not a valid log probability value —
 * NOTE(review): confirm against logmath conventions.)
 */
int32 ngram_class_prob(ngram_class_t *lmclass, int32 wid);

/**
 * Initialize base M-Gram iterator structure.
 */
void ngram_iter_init(ngram_iter_t *itor, ngram_model_t *model,
                     int m, int successor);
#endif /* __NGRAM_MODEL_INTERNAL_H__ */
|