Unique token filter

Removes duplicate tokens from a stream. For example, you can use the unique filter to change `the lazy lazy dog` to `the lazy dog`.

If the only_on_same_position parameter is set to true, the unique filter removes only duplicate tokens in the same position.

When only_on_same_position is true, the unique filter works the same as the `remove_duplicates` filter.

Example

The following analyze API request uses the unique filter to remove duplicate tokens from `the quick fox jumps the lazy fox`:

  1. resp = client.indices.analyze(
  2. tokenizer="whitespace",
  3. filter=[
  4. "unique"
  5. ],
  6. text="the quick fox jumps the lazy fox",
  7. )
  8. print(resp)
  1. response = client.indices.analyze(
  2. body: {
  3. tokenizer: 'whitespace',
  4. filter: [
  5. 'unique'
  6. ],
  7. text: 'the quick fox jumps the lazy fox'
  8. }
  9. )
  10. puts response
  1. const response = await client.indices.analyze({
  2. tokenizer: "whitespace",
  3. filter: ["unique"],
  4. text: "the quick fox jumps the lazy fox",
  5. });
  6. console.log(response);
  1. GET _analyze
  2. {
  3. "tokenizer" : "whitespace",
  4. "filter" : ["unique"],
  5. "text" : "the quick fox jumps the lazy fox"
  6. }

The filter removes the duplicated tokens `the` and `fox`, producing the following output:

  1. [ the, quick, fox, jumps, lazy ]

Add to an analyzer

The following create index API request uses the unique filter to configure a new custom analyzer.

  1. resp = client.indices.create(
  2. index="custom_unique_example",
  3. settings={
  4. "analysis": {
  5. "analyzer": {
  6. "standard_truncate": {
  7. "tokenizer": "standard",
  8. "filter": [
  9. "unique"
  10. ]
  11. }
  12. }
  13. }
  14. },
  15. )
  16. print(resp)
  1. response = client.indices.create(
  2. index: 'custom_unique_example',
  3. body: {
  4. settings: {
  5. analysis: {
  6. analyzer: {
  7. standard_truncate: {
  8. tokenizer: 'standard',
  9. filter: [
  10. 'unique'
  11. ]
  12. }
  13. }
  14. }
  15. }
  16. }
  17. )
  18. puts response
  1. const response = await client.indices.create({
  2. index: "custom_unique_example",
  3. settings: {
  4. analysis: {
  5. analyzer: {
  6. standard_truncate: {
  7. tokenizer: "standard",
  8. filter: ["unique"],
  9. },
  10. },
  11. },
  12. },
  13. });
  14. console.log(response);
  1. PUT custom_unique_example
  2. {
  3. "settings" : {
  4. "analysis" : {
  5. "analyzer" : {
  6. "standard_truncate" : {
  7. "tokenizer" : "standard",
  8. "filter" : ["unique"]
  9. }
  10. }
  11. }
  12. }
  13. }

Configurable parameters

only_on_same_position

(Optional, Boolean) If true, only remove duplicate tokens in the same position. Defaults to false.

Customize

To customize the unique filter, duplicate it to create the basis for a new custom token filter. You can modify the filter using its configurable parameters.

For example, the following request creates a custom unique filter with only_on_same_position set to true.

  1. resp = client.indices.create(
  2. index="letter_unique_pos_example",
  3. settings={
  4. "analysis": {
  5. "analyzer": {
  6. "letter_unique_pos": {
  7. "tokenizer": "letter",
  8. "filter": [
  9. "unique_pos"
  10. ]
  11. }
  12. },
  13. "filter": {
  14. "unique_pos": {
  15. "type": "unique",
  16. "only_on_same_position": True
  17. }
  18. }
  19. }
  20. },
  21. )
  22. print(resp)
  1. response = client.indices.create(
  2. index: 'letter_unique_pos_example',
  3. body: {
  4. settings: {
  5. analysis: {
  6. analyzer: {
  7. letter_unique_pos: {
  8. tokenizer: 'letter',
  9. filter: [
  10. 'unique_pos'
  11. ]
  12. }
  13. },
  14. filter: {
  15. unique_pos: {
  16. type: 'unique',
  17. only_on_same_position: true
  18. }
  19. }
  20. }
  21. }
  22. }
  23. )
  24. puts response
  1. const response = await client.indices.create({
  2. index: "letter_unique_pos_example",
  3. settings: {
  4. analysis: {
  5. analyzer: {
  6. letter_unique_pos: {
  7. tokenizer: "letter",
  8. filter: ["unique_pos"],
  9. },
  10. },
  11. filter: {
  12. unique_pos: {
  13. type: "unique",
  14. only_on_same_position: true,
  15. },
  16. },
  17. },
  18. },
  19. });
  20. console.log(response);
  1. PUT letter_unique_pos_example
  2. {
  3. "settings": {
  4. "analysis": {
  5. "analyzer": {
  6. "letter_unique_pos": {
  7. "tokenizer": "letter",
  8. "filter": [ "unique_pos" ]
  9. }
  10. },
  11. "filter": {
  12. "unique_pos": {
  13. "type": "unique",
  14. "only_on_same_position": true
  15. }
  16. }
  17. }
  18. }
  19. }