{"work":{"id":"cf07889b-1f35-4d81-9514-4ad3ed223c57","openalex_id":"https://openalex.org/W2970641574","doi":"10.18653/v1/d19-1410","arxiv_id":null,"raw_key":null,"title":"Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks","authors":[{"given":"Nils","family":"Reimers","sequence":"first","affiliation":[]},{"given":"Iryna","family":"Gurevych","sequence":"additional","affiliation":[]}],"authors_text":"Reimers, Nils and Gurevych, Iryna","year":2019,"venue":"Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing (EMNLP-IJCNLP)","abstract":null,"external_url":"https://doi.org/10.18653/v1/d19-1410","cited_by_count":7637,"metadata_source":"doi_reference","metadata_fetched_at":"2026-05-14T19:42:52.334978+00:00","pith_arxiv_id":null,"created_at":"2026-05-08T16:48:27.894480+00:00","updated_at":"2026-05-14T19:42:52.334978+00:00","title_quality_ok":true,"display_title":"Sentence-BERT: Sentence embeddings using Siamese BERT-networks","render_title":"Sentence-BERT: Sentence embeddings using Siamese BERT-networks"},"hub":{"state":{"work_id":"cf07889b-1f35-4d81-9514-4ad3ed223c57","tier":"hub","tier_reason":"10+ Pith inbound or 1,000+ external 
citations","pith_inbound_count":63,"external_cited_by_count":7637,"distinct_field_count":9,"first_pith_cited_at":"2022-12-07T09:25:54+00:00","last_pith_cited_at":"2026-05-13T12:19:01+00:00","author_build_status":"not_needed","summary_status":"needed","contexts_status":"needed","graph_status":"needed","ask_index_status":"not_needed","reader_status":"not_needed","recognition_status":"not_needed","updated_at":"2026-05-14T21:56:14.620594+00:00","tier_text":"hub"},"tier":"hub","role_counts":[],"polarity_counts":[],"runs":{"context_extract":{"job_type":"context_extract","status":"succeeded","result":{"enqueued_papers":25},"error":null,"updated_at":"2026-05-14T09:48:26.207088+00:00"},"graph_features":{"job_type":"graph_features","status":"succeeded","result":{"co_cited":[{"title":"doi: 10.18653/v1/N19-1423","work_id":"3e3c8ac8-b858-4b22-af32-393d98c883e0","shared_citers":10},{"title":"GPT-4 Technical Report","work_id":"b928e041-6991-4c08-8c81-0359e4097c7b","shared_citers":10},{"title":"Dense Passage Retrieval for Open-Domain Question Answering , url =","work_id":"083391f8-812d-430f-8d08-89a03031ce6c","shared_citers":9},{"title":"RoBERTa: A Robustly Optimized BERT Pretraining Approach","work_id":"41fe12c4-e538-4890-a244-480650ed3078","shared_citers":8},{"title":"SimCSE: Simple contrastive learning of sentence embeddings","work_id":"f9c0d9b5-c569-4f54-bdc7-6ff3cdd316db","shared_citers":7},{"title":"The Llama 3 Herd of Models","work_id":"1549a635-88af-4ac1-acfe-51ae7bb53345","shared_citers":7},{"title":"ColBERT: Efficient and effective passage search via con- textualized late interaction over bert","work_id":"a0aa64b7-2891-458f-a5ef-35ff6fa77f18","shared_citers":5},{"title":"FEVER: a large-scale dataset for Fact Extraction and VERification","work_id":"b696f75f-e5ad-4555-9c12-e292e77c388f","shared_citers":5},{"title":"Mteb: Massive text embedding benchmark","work_id":"7423c7ba-a393-45d6-bb35-9bc2c931a545","shared_citers":5},{"title":"Proceedings of the Association for 
Computational Linguistics (ACL) , pages =","work_id":"bad774d3-20f4-421f-ba75-d1ef99f02a26","shared_citers":5},{"title":"Qwen3 Technical Report","work_id":"25a4e30c-1232-48e7-9925-02fa12ba7c9e","shared_citers":5},{"title":"Retrieval-Augmented Generation for Large Language Models: A Survey","work_id":"b80d2790-6cd9-4c87-b3c4-de404f99a80e","shared_citers":5},{"title":"Text Embeddings by Weakly-Supervised Contrastive Pre-training","work_id":"789cc674-467e-4f23-bb50-05c79fe8c4c2","shared_citers":5},{"title":"DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter","work_id":"756f9764-ecd6-4672-8043-b37c698c7ad2","shared_citers":4},{"title":"doi: 10.18653/v1/ 2021.naacl-main.112","work_id":"8d675bdd-79ca-48d6-9163-fc17ce0e8ece","shared_citers":4},{"title":"Evaluating Large Language Models Trained on Code","work_id":"042493e9-b26f-4b4e-bbde-382072ca9b08","shared_citers":4},{"title":"Findings of the Association for Computational Linguistics: ACL 2024 , month = aug, year =","work_id":"6f249b44-4b0a-47dd-b5f4-1877fd9269ad","shared_citers":4},{"title":"Mistral 7B","work_id":"eb5e1305-ad11-4875-ad8d-ad8b8f697599","shared_citers":4},{"title":"MTEB: Massive text embedding benchmark","work_id":"5c5a0bda-f984-4ea2-8938-cab73425e1b6","shared_citers":4},{"title":"Proximal Policy Optimization Algorithms","work_id":"240c67fe-d14d-4520-91c1-38a4e272ca19","shared_citers":4},{"title":"Qwen3 Embedding: Advancing Text Embedding and Reranking Through Foundation Models","work_id":"bab684a8-d933-426c-a19e-2c855a0d1f59","shared_citers":4},{"title":"Representation Learning with Contrastive Predictive Coding","work_id":"7b08a1d4-d565-424e-9c86-6ef244b7b90a","shared_citers":4},{"title":"Sentence-T5: Scalable Sentence Encoders from Pre-trained Text-to-Text Models , url =","work_id":"c0499b0c-2d17-4e0b-80e9-7ee4ea48e6c2","shared_citers":4},{"title":"Adam: A Method for Stochastic 
Optimization","work_id":"1910796d-9b52-4683-bf5c-de9632c1028b","shared_citers":3}],"time_series":[{"n":1,"year":2022},{"n":1,"year":2023},{"n":4,"year":2024},{"n":56,"year":2026}],"dependency_candidates":[]},"error":null,"updated_at":"2026-05-14T09:48:30.429210+00:00"},"identity_refresh":{"job_type":"identity_refresh","status":"succeeded","result":{"items":[{"title":"Qwen3 Technical Report","outcome":"unchanged","work_id":"25a4e30c-1232-48e7-9925-02fa12ba7c9e","resolver":"local_arxiv","confidence":0.98,"old_work_id":"25a4e30c-1232-48e7-9925-02fa12ba7c9e"}],"counts":{"fixed":0,"merged":0,"unchanged":1,"quarantined":0,"needs_external_resolution":0},"errors":[],"attempted":1},"error":null,"updated_at":"2026-05-14T09:48:32.700972+00:00"},"summary_claims":{"job_type":"summary_claims","status":"succeeded","result":{"title":"Sentence-BERT: Sentence embeddings using Siamese BERT- networks","claims":[],"why_cited":"Pith tracks Sentence-BERT: Sentence embeddings using Siamese BERT- networks because it crossed a citation-hub threshold.","role_counts":[]},"error":null,"updated_at":"2026-05-14T09:48:30.431523+00:00"}},"summary":{"title":"Sentence-BERT: Sentence embeddings using Siamese BERT- networks","claims":[],"why_cited":"Pith tracks Sentence-BERT: Sentence embeddings using Siamese BERT- networks because it crossed a citation-hub threshold.","role_counts":[]},"graph":{"co_cited":[{"title":"doi: 10.18653/v1/N19-1423","work_id":"3e3c8ac8-b858-4b22-af32-393d98c883e0","shared_citers":10},{"title":"GPT-4 Technical Report","work_id":"b928e041-6991-4c08-8c81-0359e4097c7b","shared_citers":10},{"title":"Dense Passage Retrieval for Open-Domain Question Answering , url =","work_id":"083391f8-812d-430f-8d08-89a03031ce6c","shared_citers":9},{"title":"RoBERTa: A Robustly Optimized BERT Pretraining Approach","work_id":"41fe12c4-e538-4890-a244-480650ed3078","shared_citers":8},{"title":"SimCSE: Simple contrastive learning of sentence 
embeddings","work_id":"f9c0d9b5-c569-4f54-bdc7-6ff3cdd316db","shared_citers":7},{"title":"The Llama 3 Herd of Models","work_id":"1549a635-88af-4ac1-acfe-51ae7bb53345","shared_citers":7},{"title":"ColBERT: Efficient and effective passage search via con- textualized late interaction over bert","work_id":"a0aa64b7-2891-458f-a5ef-35ff6fa77f18","shared_citers":5},{"title":"FEVER: a large-scale dataset for Fact Extraction and VERification","work_id":"b696f75f-e5ad-4555-9c12-e292e77c388f","shared_citers":5},{"title":"Mteb: Massive text embedding benchmark","work_id":"7423c7ba-a393-45d6-bb35-9bc2c931a545","shared_citers":5},{"title":"Proceedings of the Association for Computational Linguistics (ACL) , pages =","work_id":"bad774d3-20f4-421f-ba75-d1ef99f02a26","shared_citers":5},{"title":"Qwen3 Technical Report","work_id":"25a4e30c-1232-48e7-9925-02fa12ba7c9e","shared_citers":5},{"title":"Retrieval-Augmented Generation for Large Language Models: A Survey","work_id":"b80d2790-6cd9-4c87-b3c4-de404f99a80e","shared_citers":5},{"title":"Text Embeddings by Weakly-Supervised Contrastive Pre-training","work_id":"789cc674-467e-4f23-bb50-05c79fe8c4c2","shared_citers":5},{"title":"DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter","work_id":"756f9764-ecd6-4672-8043-b37c698c7ad2","shared_citers":4},{"title":"doi: 10.18653/v1/ 2021.naacl-main.112","work_id":"8d675bdd-79ca-48d6-9163-fc17ce0e8ece","shared_citers":4},{"title":"Evaluating Large Language Models Trained on Code","work_id":"042493e9-b26f-4b4e-bbde-382072ca9b08","shared_citers":4},{"title":"Findings of the Association for Computational Linguistics: ACL 2024 , month = aug, year =","work_id":"6f249b44-4b0a-47dd-b5f4-1877fd9269ad","shared_citers":4},{"title":"Mistral 7B","work_id":"eb5e1305-ad11-4875-ad8d-ad8b8f697599","shared_citers":4},{"title":"MTEB: Massive text embedding benchmark","work_id":"5c5a0bda-f984-4ea2-8938-cab73425e1b6","shared_citers":4},{"title":"Proximal Policy Optimization 
Algorithms","work_id":"240c67fe-d14d-4520-91c1-38a4e272ca19","shared_citers":4},{"title":"Qwen3 Embedding: Advancing Text Embedding and Reranking Through Foundation Models","work_id":"bab684a8-d933-426c-a19e-2c855a0d1f59","shared_citers":4},{"title":"Representation Learning with Contrastive Predictive Coding","work_id":"7b08a1d4-d565-424e-9c86-6ef244b7b90a","shared_citers":4},{"title":"Sentence-T5: Scalable Sentence Encoders from Pre-trained Text-to-Text Models , url =","work_id":"c0499b0c-2d17-4e0b-80e9-7ee4ea48e6c2","shared_citers":4},{"title":"Adam: A Method for Stochastic Optimization","work_id":"1910796d-9b52-4683-bf5c-de9632c1028b","shared_citers":3}],"time_series":[{"n":1,"year":2022},{"n":1,"year":2023},{"n":4,"year":2024},{"n":56,"year":2026}],"dependency_candidates":[]},"authors":[]}}