// file: $isip/class/mmedia/TranscriptionDatabase/trans_05.cc // version: $Id: trans_05.cc 9421 2004-03-10 20:11:33Z parihar $ // // isip include files // #include "TranscriptionDatabase.h" // method: assign // // arguments: // const TranscriptionDatabase& arg: (input) transDB object // // return: logical error status // // this method has assigns the input transcription database // bool8 TranscriptionDatabase::assign(const TranscriptionDatabase& arg_a) { // declare local variables // bool8 status = false; // determine if the TranscriptionDatabase objects are equal // status = name_d.assign(arg_a.name_d); status &= hash_index_d.assign(arg_a.hash_index_d); // return the status // return status; } // method: sofSize // // arguments: none // // return: logical error status // // this method determines the size of the object // int32 TranscriptionDatabase::sofSize() const { // declare local variables // int32 size = 0; // get the size of the object // size = name_d.sofSize(); size += hash_index_d.sofSize(); // return the size // return size; } // method: eq // // arguments: // const TranscriptionDatabase& arg: (input) transDB object // // return: logical error status // // this method has determines if the transcription databases are equal // bool8 TranscriptionDatabase::eq(const TranscriptionDatabase& arg_a) const { // declare local variables // bool8 status = false; // determine if the TranscriptionDatabase objects are equal // status = name_d.eq(arg_a.name_d); status &= hash_index_d.eq(arg_a.hash_index_d); // return the status // return status; } // method: clear // // arguments: // Integral::CMODE cmode: (input) clear mode // // return: logical error status // // this method clears the content of the current object // bool8 TranscriptionDatabase::clear(Integral::CMODE cmode_a) { // declare local variables // bool8 status = false; // clear the data members // status = name_d.clear(cmode_a); status &= hash_d.clear(cmode_a); status &= hash_index_d.clear(cmode_a); // 
return the status // return status; } // method: insertRecord // // arguments: // String& identifier: (input) annotation graph id // AnnotationGraph& graph: (input) desired annotation graph // // return: logical error status // // this method inserts the record into the database // bool8 TranscriptionDatabase::insertRecord(String& identifier_a, AnnotationGraph& graph_a) { // make sure the name of the graph matches the database name // if (!graph_a.getId().eq(name_d)) { return Error::handle(name(), L"insertRecord", Error::ARG, __FILE__, __LINE__); } Long values; // get the keys associated with the hash table // Vector keys; if (!hash_d.keys(keys)) { return Error::handle(name(), L"insertRecord", Error::ARG, __FILE__, __LINE__); } values.assign(keys.length()); hash_index_d.insert(identifier_a, &values); // insert the record into the database // return hash_d.insert(identifier_a, &graph_a); } // method: getRecord // // arguments: // String& identifier: (input) annotation graph id // AnnotationGraph& graph: (output) desired annotation graph // // return: logical error status // // this method retrieves the record from the database // bool8 TranscriptionDatabase::getRecord(String& identifier_a, AnnotationGraph& graph_a) { // declare local variables // Long* tag; // determine if the identifier exists in the database // tag = hash_index_d.get(identifier_a); if (tag == (Long*)NULL) { return false; } // when the identifier exists in the database return a copy of the record // else { graph_a.clear(); if (!graph_a.read(database_sof_d, (int32)*tag)) { Error::handle(name(), L"getReord", Error::NO_PARAM_FILE, __FILE__, __LINE__); } } // exit gracefully // return true; } // method: getRecord // // arguments: // String& identifier: (input) annotation graph id // String& feature: (input) annotation feature // String& value: (input) annotation feature value // DoubleLinkedList& records: (output) list of records // // return: logical error status // // this method retrieves the record 
// from the database
//
bool8 TranscriptionDatabase::getRecord(String& identifier_a, String& feature_a, String& value_a, DoubleLinkedList& records_a) {

  // declare local variables
  //
  String tmpstr;
  DoubleLinkedList annos;
  AnnotationGraph graph;

  // declare local variables
  //
  Long* tag;

  // determine if the identifier exists in the database
  //
  tag = hash_index_d.get(identifier_a);
  if (tag == (Long*)NULL) {
    return false;
  }

  // when the identifier exists in the database return a copy of the record
  //
  else {
    graph.clear();

    // NOTE(review): the method name below is misspelled ("getReord") in the
    // reported error; it is a runtime string and is preserved as-is here
    //
    if (!graph.read(database_sof_d, (int32)*tag)) {
      Error::handle(name(), L"getReord", Error::NO_PARAM_FILE, __FILE__, __LINE__);
    }

    // retrieve the list of records from the database that contains
    // the features
    //

    // get the annotations with the given features
    //
    if (!graph.getAnnotationSetByFeature(feature_a, value_a, annos)) {
      return Error::handle(name(), L"getRecord - unable to find the annotation for the given feature and the value", Error::ARG, __FILE__, __LINE__);
    }

    // add the annotation labels (types) to the output list
    //
    for (bool8 more=annos.gotoFirst(); more; more=annos.gotoNext()) {
      tmpstr.assign(annos.getCurr()->getType());
      records_a.insert(&tmpstr);
    }
  }

  // clean up memory
  //
  graph.clear();

  // exit gracefully
  //
  return true;
}

// method: getRecord
//
// arguments:
//  String& identifier: (input) annotation graph id
//  String& feature: (input) annotation feature
//  String& value: (input) annotation feature value
//  DoubleLinkedList& records: (output) list of records
//
// return: logical error status
//
// this method retrieves the record from the database
//
// NOTE(review): this overload appears to share a signature with the previous
// getRecord — they are presumably distinguished by DoubleLinkedList template
// arguments not visible in this view; confirm against the class header
//
bool8 TranscriptionDatabase::getRecord(String& identifier_a, String& feature_a, String& value_a, DoubleLinkedList& records_a) {

  // declare local variables
  //
  AnnotationGraph graph;
  DoubleLinkedList records;

  // declare local variables
  //
  Long* tag;

  // determine if the identifier exists in the database
  //
  tag = hash_index_d.get(identifier_a);
  if (tag == (Long*)NULL) {
    return false;
  }

  // when the identifier exists in the database return a copy of the record
  //
  else {
    graph.clear();
    if (!graph.read(database_sof_d, (int32)*tag)) {
      Error::handle(name(), L"getRecord", Error::NO_PARAM_FILE, __FILE__, __LINE__);
    }

    // retrieve the list of records from the database that contains
    // the features (only when the read above succeeded)
    //
    else {

      // get the annotations with the given features
      //
      if (!graph.getAnnotationSetByFeature(feature_a, value_a, records)) {
	return false;
      }
    }
  }
  records_a.assign(records);

  // clean up memory
  //
  graph.clear();

  // exit gracefully
  //
  return true;
}

// method: getRecord
//
// arguments:
//  String& identifier: (input) annotation graph id
//  String& feature_name: (input) feature name
//  Vector& values: (input) annotation feature values corresponding
//                          to feature-name
//  String& record: (output) alignments
//
// return: logical error status
//
// this method retrieves the alignment from the database
//
bool8 TranscriptionDatabase::getRecord(String& identifier_a, String& feature_name_a, Vector& values_a, String& record_a) {

  // declare local variables
  //
  DoubleLinkedList annos;
  AnnotationGraph graph;
  Long* tag;
  String fea_score(L"score");

  // check for input values
  //
  if (values_a.length() <= int32(0)) {
    Error::handle(name(), L"getRecord - levels missing", Error::ARG, __FILE__, __LINE__);
  }

  // determine if the identifier exists in the database
  //
  tag = hash_index_d.get(identifier_a);
  if (tag == (Long*)NULL) {
    Error::handle(name(), L"getRecord - annotation-graph missing", Error::ARG, __FILE__, __LINE__);
  }

  // when the identifier exists in the database return a copy of the record
  //
  else {
    graph.clear();
    if (!graph.read(database_sof_d, (int32)*tag)) {
      Error::handle(name(), L"getRecord", Error::ARG, __FILE__, __LINE__);
    }

    // loop over all the required levels
    //
    for (int32 i = 0; i < values_a.length(); i++) {

      // get all the annotations at feature "level" and value starting
      // with the top-most level
      //
      if (!graph.getAnnotationSetByFeature(feature_name_a, values_a(i), annos)) {
	return Error::handle(name(), L"getRecord - unable to find the annotations for the given feature and the value", Error::ARG, __FILE__, __LINE__);
      }

      // update the output alignment format: a header line naming the
      // feature and its value
      //
      record_a.concat(L"\n");
      record_a.concat(L" ");
      record_a.concat(feature_name_a);
      record_a.concat(L": ");
      record_a.concat(values_a(i));

      // loop over each annotation at this level
      //
      Annotation* anno = (Annotation*)NULL;
      for (bool8 more = annos.gotoFirst(); more; more = annos.gotoNext()) {
	anno = (Annotation*)NULL;
	anno = annos.getCurr();

	// check the annotation for NULL
	//
	if (anno == (Annotation*)NULL) {
	  return Error::handle(name(), L"getRecord - NULL Annotation", Error::ARG, __FILE__, __LINE__);
	}

	// get the type and the score at this annotation
	//
	String type;
	String score;
	type.assign(anno->getType());
	score.assign(anno->getFeature(fea_score));

	// get start and end anchors for the current annotation
	//
	Anchor* anch_start = (Anchor*)NULL;
	Anchor* anch_end = (Anchor*)NULL;
	anch_start = anno->getStartAnchor();
	anch_end = anno->getEndAnchor();

	// check for NULL anchors
	//
	if ((anch_start == (Anchor*)NULL) || (anch_end == (Anchor*)NULL)) {
	  return Error::handle(name(), L"getRecord - NULL Anchors", Error::ARG, __FILE__, __LINE__);
	}

	// get the unit and offset from the start and end anchors
	//
	String unit_start;
	String unit_end;
	unit_start.assign(anch_start->getUnit());
	unit_end.assign(anch_end->getUnit());
	float32 offset_start;
	float32 offset_end;
	offset_start = anch_start->getOffset();
	offset_end = anch_end->getOffset();

	// update the output alignment format: one line per annotation
	// with the time span, unit, label and score
	//
	record_a.concat(L"\n");
	record_a.concat(L" from");
	record_a.concat(L": ");
	record_a.concat(offset_start);
	record_a.concat(L" ");
	record_a.concat(unit_start);
	record_a.concat(L" to");
	record_a.concat(L": ");
	record_a.concat(offset_end);
	record_a.concat(L" ");
	record_a.concat(unit_end);
	record_a.concat(L" ");
	record_a.concat(type);
	record_a.concat(L" ");
	record_a.concat(score);
      }

      // update the output alignment format
      //
      record_a.concat(L"\n");
    }

    // update the output alignment format
    //
    record_a.concat(L"\n");
  }

  // clean up memory
  //
  graph.clear();

  // exit gracefully
  //
  return true;
}

// method: getHypothesesNistTrn
//
// arguments:
//  Sdb& ident_list_sdb: (input) list of identifiers
//  Sdb& exclude_symbols_sdb: (input) list of exclude-symbols
//  String& fvalue: (input) annotation feature value (transcription level)
//  Vector& nist_hypotheses: (output) vector of NIST_TRN format
//                           hypotheses
//
// return: logical error status
//
// this method retrieves the hypotheses at a given feature (level)
// from the database in NIST_TRN format
//
bool8 TranscriptionDatabase::getHypothesesNistTrn(Sdb& ident_list_sdb_a, Sdb& exclude_symbols_sdb_a, String& fvalue_a, Vector& nist_hypotheses_a) {

  // declare local variables
  //
  Filename input_ID;
  int32 num_ident = (int32)0;
  Vector identifiers;

  // default feature name for transcriptions
  //
  String fname(L"level");

  // default feature name for level-index and highest level-index
  // (note: this modifies the caller's fvalue_a when it is empty)
  //
  if (fvalue_a.length() < (int32)1) {
    fname.assign(L"level_index");
    fvalue_a.assign(L"0");
  }

  // if no identifiers are specified, extract all the transcriptions
  // contained in the database
  //
  int32 len = ident_list_sdb_a.length();
  if (len < (int32)1) {
    getIdentifiers(identifiers);
  }
  else {
    identifiers.setLength(len);
  }

  // loop over all the identifiers
  //
  int32 i = (int32)0;
  for (ident_list_sdb_a.gotoFirst(); ident_list_sdb_a.getName(input_ID); ident_list_sdb_a.gotoNext()) {

    // get the identifier
    //
    identifiers(i).assign(input_ID);
    i++;
  }

  // set the capacity of the output vector to the length of the input
  // identifiers
  //
  nist_hypotheses_a.setCapacity(identifiers.length());

  // loop over all the identifiers and extract the hypotheses
  // corresponding to each
  //
  for (int32 j = 0; j < identifiers.length(); j++) {

    // local variables
    //
    DoubleLinkedList records;
    if (debug_level_d >= Integral::NONE) {
      Console::increaseIndention();
      String output;
      output.assign(L"\nretrieving transcription for identifier: ");
      output.concat(identifiers(j));
      output.concat(L", ");
      output.concat(fname);
      output.concat(L": ");
      output.concat(fvalue_a);
      Console::put(output);
      Console::decreaseIndention();
    }

    // get the transcription at the given level
    //
    if (!getRecord(identifiers(j), fname, fvalue_a, records)) {
      identifiers(j).debug(L"identifier");
      return Error::handle(name(), L"getHypothesesNistTrn - unable to find transcription for identifier", Error::ARG, __FILE__, __LINE__);
    }

    // concat all the records to get the hypothesis
    //
    nist_hypotheses_a.setLength(nist_hypotheses_a.length() + (int32)1);
    for (bool8 more = records.gotoFirst(); more; more = records.gotoNext()) {

      // get the record
      //
      String* record = records.getCurr();

      // local variables
      //
      String exclude_symbol;
      Filename input_exclude_symbol;
      bool8 exclude = false;

      // see if this record is included in the exclude-symbols list
      //
      for (exclude_symbols_sdb_a.gotoFirst(); exclude_symbols_sdb_a.getName(input_exclude_symbol); exclude_symbols_sdb_a.gotoNext()) {

	// get the exclude-symbol
	//
	exclude_symbol.assign(input_exclude_symbol);
	if (exclude_symbol.eq(*record)) {
	  exclude = true;
	}
      }

      // add this record only if it is not included in the
      // exclude-symbols list
      //
      if (!exclude) {
	nist_hypotheses_a(num_ident).concat(*record);
	nist_hypotheses_a(num_ident).concat(L" ");
      }
    }

    // concat the identifier within braces (NIST_TRN trailer)
    //
    nist_hypotheses_a(num_ident).concat(L"(");
    nist_hypotheses_a(num_ident).concat(identifiers(j));
    nist_hypotheses_a(num_ident).concat(L")\n");

    // increment the counter
    //
    num_ident++;
  }

  // exit gracefully
  //
  return true;
}

// method: getAlignmentsHypAlign
//
// arguments:
//  Sdb& ident_list_sdb: (input) list of identifiers
//  String& levels: (input) transcription levels in format (word : state)
//  Vector& alignments: (output) alignments in HYP_ALIGN format
//
// return: logical error status
//
// this method retrieves the hypotheses at a given level(s) from the
// database in HYP_ALIGN format
//
bool8 TranscriptionDatabase::getAlignmentsHypAlign(Sdb& ident_list_sdb_a, String& levels_a, Vector& alignments_a) {

  // declare local variables
  //
  Vector levels;
  String identifier;
  Filename input_ID;
  Vector identifiers;

  // default feature name for transcriptions
  //
  String fname(L"level");

  // parse the input level string and get the number of levels
  //
  if (levels_a.length() > (int32)0) {
    if (!parseLevels(levels_a, levels)) {
      return Error::handle(name(), L"getAlignmentsHypAlign - unable to parse the input level", Error::ARG, __FILE__, __LINE__);
    }
  }

  // default feature name for level-index and highest level-index
  //
  else {
    fname.assign(L"level_index");
    levels.setLength((int32)1);
    levels((int32)0).assign(L"0");
  }

  // if no identifiers are specified, extract alignments for all the
  // transcriptions contained in the database
  //
  int32 len = ident_list_sdb_a.length();
  if (len < (int32)1) {
    getIdentifiers(identifiers);
  }
  else {
    identifiers.setLength(len);
  }

  // loop over all the identifiers
  //
  int32 i = (int32)0;
  for (ident_list_sdb_a.gotoFirst(); ident_list_sdb_a.getName(input_ID); ident_list_sdb_a.gotoNext()) {

    // get the identifier
    //
    identifiers(i).assign(input_ID);
    i++;
  }

  // set the capacity of the output vector to the length of the input
  // identifiers
  //
  alignments_a.setCapacity(identifiers.length());

  // loop over all the identifiers and extract the alignments at
  // required levels corresponding to each
  //
  for (int32 j = 0; j < identifiers.length(); j++) {

    // get the identifier
    //
    if (debug_level_d >= Integral::NONE) {
      Console::increaseIndention();
      String output;
      output.assign(L"\nretrieving alignments for identifier: ");
      output.concat(identifiers(j));
      output.concat(L", ");
      output.concat(fname);
      output.concat(L": ");
      for (int32 i = 0; i < levels.length(); i++) {
	output.concat(levels(i));
      }
      Console::put(output);
      Console::decreaseIndention();
    }

    // update the alignment string
    // NOTE(review): "identier" below is a misspelling in a runtime string;
    // preserved as-is because downstream consumers may match on it
    //
    String alignment;
    alignment.concat(L"alignments for identier: ");
    alignment.concat(identifiers(j));

    // get the alignments at the given level(s)
    //
    if (!getRecord(identifiers(j), fname, levels, alignment)) {
      identifiers(j).debug(L"identifier");
      return Error::handle(name(), L"getAlignmentsHypAlign - unable to find transcription for identifier", Error::ARG, __FILE__, __LINE__);
    }

    // store the alignment in the vector
    //
    int32 temp_len = alignments_a.length();
    alignments_a.setLength(temp_len + (int32)1);
    alignments_a(temp_len).assign(alignment);
  }

  // exit gracefully
  //
  return true;
}

// method: parseLevels
//
// arguments:
//  const String& input: (input) transcription levels in format (word : state)
//  Vector& levels: (output) parsed levels
//
// return: logical error status
//
// this method parses the input string for the levels
//
bool8 TranscriptionDatabase::parseLevels(const String& input_a, Vector& levels_a) {

  // error if the input string is empty
  //
  if (input_a.length() == (int32)0) {
    return Error::handle(name(), L"parseLevels - empty string", Error::ARG, __FILE__, __LINE__);
  }

  // count the number of sub-strings separated by ','
  //
  int32 tokens = input_a.countTokens(L',');

  // get all the sub-strings separated by ','
  // NOTE(review): variable-length array — a gcc extension, not standard C++
  //
  String vals[tokens];
  for (int32 i = 0, pos = 0; i < tokens; i++) {
    input_a.tokenize(vals[i], pos, L",");
  }

  // compute the number of all the sub-strings (levels) in the input
  // string separated by the delimiter ':'
  //
  int32 num = 0;
  for (int32 i = 0; i < tokens; i++) {
    num += vals[i].countTokens(L':');
  }

  // allocate the minimum space for these levels
  //
  levels_a.clear(Integral::RELEASE);
  levels_a.setCapacity(num);

  // local variables
  //
  Vector levels_in;
  Vector levels_out;

  // loop through all the sub-strings
  //
  for (int32 i = 0; i < tokens; i++) {

    // local variables
    //
    String level;
    int32 pos = 0;

    // clear the levels from the previous loop
    //
    levels_in.clear(Integral::RELEASE);
    levels_out.clear(Integral::RELEASE);

    // parse the input in the format based on the delimiter ":"
    //
    while (vals[i].tokenize(level, pos, L":")) {

      // get rid of white spaces if any
      //
      level.trim();

      // collect the level names
      //
      int32 len = levels_in.length();
      levels_in.setLength(len + (int32)1);
      levels_in(len).assign(level);
    }

    // get the levels in numericals from the first annotation-graph in
    // the transcription database
    //

    // declare local variables
    //
    Vector ident;
    AnnotationGraph graph;

    // get all the identifiers in the database
    //
    if (!hash_index_d.keys(ident)) {
      Error::handle(name(), L"parseLevels - no transcriptions in the database", Error::ARG, __FILE__, __LINE__);
    }

    // return a copy of the record corresponding to the first tag in the
    // vector
    //
    graph.clear();
    if (!graph.read(database_sof_d, *hash_index_d.get(ident(0)))) {
      Error::handle(name(), L"parseLevels - transcription missing", Error::ARG, __FILE__, __LINE__);
    }

    // local variables
    //
    Vector values;
    VectorLong indices;
    DoubleLinkedList temp_annos;
    Annotation* temp_anno = (Annotation*)NULL;

    // set the capacity of the variables
    //
    int32 len = levels_in.length();
    values.setCapacity(len);
    indices.setCapacity(len);

    // declare the feature "index" and the feature "level" as string
    // objects
    //
    String fea_index(L"level_index");
    String fea_level(L"level");

    // get the level values corresponding to the features
    //
    for (int32 i = 0; i < len; i++) {

      // set length
      //
      values.setLength(i+1);
      indices.setLength(i+1);

      // get all the annotations from the graph at a given level and
      // then find the index from the first annotation
      //
      if (!graph.getAnnotationSetByFeature(fea_level, levels_in(i), temp_annos)) {
	Error::handle(name(), L"parseLevels - transcription missing", Error::ARG, __FILE__, __LINE__);
      }

      // get the first annotation
      //
      if (temp_annos.length() > (int32)0) {
	temp_anno = temp_annos.getFirst();
      }
      else
	Error::handle(name(), L"parseLevels - level missing", Error::ARG, __FILE__, __LINE__);

      // get the corresponding level index
      //
      values(i).assign((*temp_anno).getFeature(fea_index));
      indices(i).assign(values(i));
    }

    // find the minimum and maximum index of the levels
    //
    Long min_index = indices.min();
    Long max_index = indices.max();

    // set the minimum capacity of the output levels
    //
    levels_out.setCapacity(levels_in.length());

    // get the missing levels between the min and max indices
    //
    for (int32 j = (int32)min_index; j <= (int32)max_index; j++) {

      // local variables
      //
      String temp_level;
      Long index = (Long)-1;
      String temp_index;

      // get the Annotations from the AnnotationGraph
      //
      if (!graph.getAnnotationSet(temp_annos)) {
	Error::handle(name(), L"parseLevels - transcription missing", Error::ARG, __FILE__, __LINE__);
      }

      // loop over all the annotations to find the corresponding level
      // name
      //
      for (bool8 more = temp_annos.gotoFirst(); more; more = temp_annos.gotoNext()) {

	// get the current annotation
	//
	temp_anno = temp_annos.getCurr();

	// check for NULL condition
	//
	if (temp_anno == (Annotation*)NULL) {
	  Error::handle(name(), L"parseLevels - NULL Annotation", Error::ARG, __FILE__, __LINE__);
	}

	// get the level and then get the corresponding index
	//
	temp_level = (*temp_anno).getFeature(fea_level);
	temp_index = (*temp_anno).getFeature(fea_index);
	index.assign(temp_index);

	// see if the index matches
	//
	if ((int32)index == j) {
	  int32 temp_len = levels_out.length();
	  levels_out.setLength(temp_len + (int32)1);
	  levels_out(temp_len).assign(temp_level);
	  break;
	}
      }
    }

    // add the sub-string levels to the total number of output levels
    //
    for ( int32 k = 0; k < levels_out.length(); k++) {
      int32 length_tmp = levels_a.length();
      levels_a.setLength(length_tmp + (int32)1);
      levels_a(length_tmp).assign(levels_out(k));
    }

    // clean up memory
    //
    graph.clear();
  }

  // exit gracefully
  //
  return true;
}

// method: getRecord
//
// arguments:
//  String& identifier: (input) annotation graph id
//  String& feature: (input) annotation feature
//  String& value: (input) annotation value
//  Vector& annotations: (output) annotations for the feature
//  VectorFloat& start_times: (output) start timing information (alignments)
//  VectorFloat& end_times: (output) end timing information
// (alignments)
//
// return: logical error status
//
// this method retrieves the alignments from the database
//
bool8 TranscriptionDatabase::getRecord(String& identifier_a, String& feature_a, String& value_a, Vector& annotations_a, VectorFloat& start_times_a, VectorFloat& end_times_a){

  // declare local variables
  //
  DoubleLinkedList annos;
  AnnotationGraph graph;
  Long* tag;

  // check for input values
  //
  if (value_a.length() <= int32(0)) {
    Error::handle(name(), L"getRecord - levels missing", Error::ARG, __FILE__, __LINE__);
  }

  // determine if the identifier exists in the database
  //
  tag = hash_index_d.get(identifier_a);
  if (tag == (Long*)NULL) {
    Error::handle(name(), L"getRecord - annotation-graph missing", Error::ARG, __FILE__, __LINE__);
  }

  // when the identifier exists in the database return a copy of the record
  //
  else {
    graph.clear();
    if (!graph.read(database_sof_d, (int32)*tag)) {
      Error::handle(name(), L"getRecord", Error::ARG, __FILE__, __LINE__);
    }

    // get all the annotations at feature "level" and value starting
    // with the top-most level
    //
    if (!graph.getAnnotationSetByFeature(feature_a, value_a, annos)) {
      return Error::handle(name(), L"getRecord - unable to find the annotations for the given feature and the value", Error::ARG, __FILE__, __LINE__);
    }

    // loop over each annotation at this level
    //
    Annotation* anno = (Annotation*)NULL;
    int32 temp = (int32)0;
    for (bool8 more = annos.gotoFirst(); more; more = annos.gotoNext()) {
      anno = (Annotation*)NULL;
      anno = annos.getCurr();

      // check the annotation for NULL
      //
      if (anno == (Annotation*)NULL) {
	return Error::handle(name(), L"getRecord - NULL Annotation", Error::ARG, __FILE__, __LINE__);
      }

      // get start and end anchors for the current annotation
      //
      Anchor* anch_start = (Anchor*)NULL;
      Anchor* anch_end = (Anchor*)NULL;
      anch_start = anno->getStartAnchor();
      anch_end = anno->getEndAnchor();
      String annotation;
      annotation.assign(anno->getType());

      // check for NULL anchors
      //
      if ((anch_start == (Anchor*)NULL) || (anch_end == (Anchor*)NULL)) {
	return Error::handle(name(), L"getRecord - NULL Anchors", Error::ARG, __FILE__, __LINE__);
      }

      // get the unit and offset from the start and end anchors
      //
      String unit_start;
      String unit_end;
      unit_start.assign(anch_start->getUnit());
      unit_end.assign(anch_end->getUnit());

      // grow all three parallel output vectors and append the
      // annotation label with its start/end offsets
      //
      start_times_a.setLength(++temp);
      end_times_a.setLength(temp);
      annotations_a.setLength(temp);
      start_times_a(temp-(int32)1) = anch_start->getOffset();
      end_times_a(temp-(int32)1) = anch_end->getOffset();
      annotations_a(temp-(int32)1).assign(annotation);
    }
  }

  // clean up memory
  //
  graph.clear();

  // exit gracefully
  //
  return true;
}

// method: getRecord
//
// arguments:
//  String& identifier: (input) annotation graph id
//  String& feature: (input) annotation feature
//  String& value: (input) annotation value
//  const String& type: (input) annotation type
//  Vector& annotations: (output) annotations for the feature
//  VectorFloat& start_times: (output) start timing information (alignments)
//  VectorFloat& end_times: (output) end timing information (alignments)
//
// return: logical error status
//
// this method retrieves the alignments from the database on the basis
// of the feature-value pair and type
//
bool8 TranscriptionDatabase::getRecord(String& identifier_a, String& feature_a, String& value_a, String& type_a, Vector& annotations_a, VectorFloat& start_times_a, VectorFloat& end_times_a) {

  // declare local variables
  //
  DoubleLinkedList annos;
  AnnotationGraph graph;
  Long* tag;

  // check for input values
  //
  if (value_a.length() <= int32(0)) {
    Error::handle(name(), L"getRecord - levels missing", Error::ARG, __FILE__, __LINE__);
  }

  // determine if the identifier exists in the database
  //
  tag = hash_index_d.get(identifier_a);
  if (tag == (Long*)NULL) {
    Error::handle(name(), L"getRecord - annotation-graph missing", Error::ARG, __FILE__, __LINE__);
  }

  // when the identifier exists in the database return a copy of the record
  //
  else {
    graph.clear();
    if (!graph.read(database_sof_d, (int32)*tag)) {
      Error::handle(name(), L"getRecord", Error::ARG, __FILE__, __LINE__);
    }

    // get all the annotations at feature "level" and value starting
    // with the top-most level
    //
    if (!graph.getAnnotationSetByFeature(feature_a, value_a, annos)) {
      return Error::handle(name(), L"getRecord - unable to find the annotations for the given feature and the value", Error::ARG, __FILE__, __LINE__);
    }

    // loop over each annotation at this level
    //
    Annotation* anno = (Annotation*)NULL;
    int32 temp = (int32)0;
    for (bool8 more = annos.gotoFirst(); more; more = annos.gotoNext()) {
      anno = (Annotation*)NULL;
      anno = annos.getCurr();

      // check the annotation for NULL
      //
      if (anno == (Annotation*)NULL) {
	return Error::handle(name(), L"getRecord - NULL Annotation", Error::ARG, __FILE__, __LINE__);
      }

      // insert the annotation and its respective start-end times only
      // when the type matches
      //
      if (anno->getType().eq(type_a)) {

	// get start and end anchors for the current annotation
	//
	Anchor* anch_start = (Anchor*)NULL;
	Anchor* anch_end = (Anchor*)NULL;
	anch_start = anno->getStartAnchor();
	anch_end = anno->getEndAnchor();
	String annotation;
	annotation.assign(anno->getType());

	// check for NULL anchors
	//
	if ((anch_start == (Anchor*)NULL) || (anch_end == (Anchor*)NULL)) {
	  return Error::handle(name(), L"getRecord - NULL Anchors", Error::ARG, __FILE__, __LINE__);
	}

	// get the unit and offset from the start and end anchors
	//
	String unit_start;
	String unit_end;
	unit_start.assign(anch_start->getUnit());
	unit_end.assign(anch_end->getUnit());

	// grow all three parallel output vectors and append the
	// annotation label with its start/end offsets
	//
	start_times_a.setLength(++temp);
	end_times_a.setLength(temp);
	annotations_a.setLength(temp);
	start_times_a(temp-(int32)1) = anch_start->getOffset();
	end_times_a(temp-(int32)1) = anch_end->getOffset();
	annotations_a(temp-(int32)1).assign(annotation);
      }
    }
  }

  // clean up memory
  //
  graph.clear();

  // exit gracefully
  //
  return true;
}

// method: getIdentifiers
//
// arguments:
//  Vector& identifiers_a: (output) list of identifiers
//  (the original comment said "Sdb&" — the declared type is Vector&)
//
// return: logical error status
//
// this method retrieves all the identifiers in the database
//
bool8 TranscriptionDatabase::getIdentifiers(Vector& identifiers_a) {

  // clear all the elements in the output vector
  //
  identifiers_a.clear();

  // get all the keys (identifiers) from the hash table
  //
  return hash_index_d.keys(identifiers_a);
}