podoc:tokenized_like_sql_function
This is an old revision of the document!
Database Function Description of "tokenized_like"
Name
tokenized_like
Description
This function searches for exact matches of 'keyword' in 'string'.
The input 'string' is tokenized by a space, and each token is compared to the 'keyword'. True is returned if a token fully matches the keyword. If the 'keyword' does not match any of the tokens of 'string', false is returned.
This function is case sensitive.
There is an alternative second version that takes an integer as its first argument.
Purpose
Searching for keywords.
Schema
-- This function searches for exact matches of 'keyword' in 'string'.
-- The input 'string' is tokenized by a space (and also by a semicolon),
-- and each token is compared to the 'keyword'. True is returned if a
-- token fully matches the keyword. If the 'keyword' does not match any
-- of the tokens of 'string', false is returned.
-- This function is case sensitive.
-- NOTE: because the inner scan compares the text from each token start
-- up to every following separator, a keyword that itself contains a
-- separator (e.g. 'a b') can also match a multi-token span.
create function tokenized_like(varchar, varchar) returns bool as '
declare
    string        alias for $1;
    keyword       alias for $2;
    token         varchar;
    character     char;
    string_length integer;
    token_length  integer;
    token_begin   integer;
    index_cursor  integer;
    head_cursor   integer;
begin
    head_cursor   := 1;
    string_length := char_length(string);
    -- One outer iteration per candidate token start position.
    -- Bug fix: the condition must be <= (was <) so that a token starting
    -- at the final character is still examined; previously keyword
    -- \'b\' was not found in \'a b\', and a one-character string never
    -- matched a one-character keyword.
    while head_cursor <= string_length loop
        token_length := 0;
        index_cursor := head_cursor;
        token_begin  := head_cursor;
        character    := substring(string from index_cursor for 1);
        -- Scan forward from token_begin; whenever a separator or the end
        -- of the string is reached, compare the accumulated token.
        while index_cursor <= string_length loop
            if ((character = \' \') or (character = \';\') or
                (index_cursor = string_length)) then
                if (index_cursor = string_length) then
                    -- The final character belongs to the token, so
                    -- include it in the comparison.
                    token_length := token_length + 1;
                end if;
                token := substring(string from token_begin for token_length);
                if (token = keyword) then
                    return true;
                end if;
            end if;
            token_length := token_length + 1;
            index_cursor := index_cursor + 1;
            character    := substring(string from index_cursor for 1);
        end loop;
        -- Advance head_cursor to the position just after the next
        -- separator (or past the end of the string).
        index_cursor := head_cursor;
        while index_cursor <= string_length loop
            character := substring(string from index_cursor for 1);
            if ((character = \' \') or (character = \';\') or
                (index_cursor = string_length)) then
                head_cursor := index_cursor + 1;
                exit;
            end if;
            index_cursor := index_cursor + 1;
        end loop;
    end loop;
    return false;
end;
' language 'plpgsql';
-- Operator wrapper around tokenized_like(varchar, varchar) so the
-- function integrates easily into the current PO search queries.
create operator ==== (
    leftarg    = varchar,
    rightarg   = varchar,
    procedure  = tokenized_like,
    commutator = ====
);
-- Integer variant: behaves exactly like tokenized_like(varchar, varchar),
-- but accepts an integer as its first argument, converting it to text
-- before delegating to the varchar version.
create function tokenized_like(integer, varchar) returns bool as '
declare
    int_value  alias for $1;
    search_key alias for $2;
begin
    return tokenized_like(text(int_value), search_key);
end;
' language 'plpgsql';
-- Operator wrapper around tokenized_like(integer, varchar) so the
-- function integrates easily into the current PO search queries.
create operator ==== (
    leftarg    = integer,
    rightarg   = varchar,
    procedure  = tokenized_like,
    commutator = ====
);
Related functions
History
Used first
Version 2.30
Used last
Still in use
Change history
= Version 2.30 =
-- This function searches for exact matches of 'keyword' in 'string'.
-- The input 'string' is tokenized by a space, and each token is compared
-- to the 'keyword'. True is returned if the token fully matches the
-- keyword. If the 'keyword' did not match any of the tokens of 'string'
-- false is returned.
-- This function is case sensitive.
-- NOTE(review): historical Version 2.30 snapshot, kept verbatim. The
-- outer condition "head_cursor < string_length" never starts a token at
-- the final character, so a one-character last token (e.g. keyword 'b'
-- in 'a b') is not found. Also, ';' is treated as a separator in
-- addition to the space documented above.
create function tokenized_like(varchar, varchar) returns bool as '
declare
    string        alias for $1;
    keyword       alias for $2;
    token         varchar;
    character     char;
    string_length integer;
    token_length  integer;
    token_begin   integer;
    index_cursor  integer;
    head_cursor   integer;
begin
    head_cursor := 1;
    string_length := char_length(string);
    -- One outer iteration per candidate token start position.
    while head_cursor < string_length loop
        token_length := 0;
        index_cursor := head_cursor;
        token_begin := head_cursor;
        character := substring(string from index_cursor for 1);
        -- Scan forward; at each separator or at the end of the string,
        -- compare the accumulated text against the keyword.
        while index_cursor <= string_length loop
            if ((character = \' \') or (character = \';\') or (index_cursor = string_length)) then
                if (index_cursor = string_length) then
                    -- Include the final character in the token.
                    token_length := token_length + 1;
                end if;
                token := substring(string from token_begin for token_length);
                if (token = keyword) then
                    return true;
                end if;
            end if;
            token_length := token_length + 1;
            index_cursor := index_cursor + 1;
            character := substring(string from index_cursor for 1);
        end loop;
        -- Advance head_cursor past the next separator.
        index_cursor := head_cursor;
        while index_cursor <= string_length loop
            character := substring(string from index_cursor for 1);
            if ((character = \' \') or (character = \';\') or (index_cursor = string_length)) then
                head_cursor := index_cursor + 1;
                exit;
            end if;
            index_cursor := index_cursor + 1;
        end loop;
    end loop;
    return false;
end;
' language 'plpgsql';
-- we define operators for each of our functions so that we can easily
-- integrate them into the current PO search queries
create operator ==== (
    leftarg = varchar,
    rightarg = varchar,
    procedure = tokenized_like,
    commutator = ====
);
-- This function is exactly like tokenized_like, but its first argument
-- is integer.
create function tokenized_like(integer, varchar) returns bool as '
declare
    identifier alias for $1;
    keyword alias for $2;
begin
    -- Convert the integer to text and delegate to the varchar version.
    return tokenized_like(text(identifier), keyword);
end;
' language 'plpgsql';
-- we define operators for each of our functions so that we can easily
-- integrate them into the current PO search queries
create operator ==== (
    leftarg = integer,
    rightarg = varchar,
    procedure = tokenized_like,
    commutator = ====
);
podoc/tokenized_like_sql_function.1176908615.txt.gz · Last modified: 2007/04/18 15:03 by Luud