Add new SentenceTransformer model
Files changed in this commit:

- 1_Pooling/config.json (+10 -0)
- README.md (+948 -0)
- config.json (+27 -0)
- config_sentence_transformers.json (+10 -0)
- merges.txt (+0 -0)
- model.safetensors (+3 -0)
- modules.json (+20 -0)
- sentence_bert_config.json (+4 -0)
- special_tokens_map.json (+51 -0)
- tokenizer.json (+0 -0)
- tokenizer_config.json (+65 -0)
- vocab.json (+0 -0)
1_Pooling/config.json (ADDED)
{
  "word_embedding_dimension": 768,
  "pooling_mode_cls_token": false,
  "pooling_mode_mean_tokens": true,
  "pooling_mode_max_tokens": false,
  "pooling_mode_mean_sqrt_len_tokens": false,
  "pooling_mode_weightedmean_tokens": false,
  "pooling_mode_lasttoken": false,
  "include_prompt": true
}
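This configuration enables mean pooling only: the 768-dimensional sentence embedding is the attention-mask-weighted average of the token embeddings produced by the transformer module. As a rough illustration (not part of the uploaded files, using made-up tensors rather than real model output), the computation looks like this:

```python
import torch

def mean_pool(token_embeddings: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    """Average the token embeddings of each sequence, ignoring padded positions."""
    # Expand the mask so it broadcasts over the hidden dimension: (batch, seq_len, 1)
    mask = attention_mask.unsqueeze(-1).to(token_embeddings.dtype)
    summed = (token_embeddings * mask).sum(dim=1)   # sum of real-token vectors
    counts = mask.sum(dim=1).clamp(min=1e-9)        # number of real tokens per sequence
    return summed / counts                          # (batch, 768) sentence embeddings

# Toy shapes matching this model's hidden size of 768
tokens = torch.randn(2, 16, 768)
mask = torch.ones(2, 16)
print(mean_pool(tokens, mask).shape)  # torch.Size([2, 768])
```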
README.md (ADDED)
---
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:647
- loss:MultipleNegativesRankingLoss
base_model: sentence-transformers/all-distilroberta-v1
widget:
- source_sentence: Google Sheets expertise, data validation, report restructuring
  sentences:
  - >-
    Requirements: We're looking for a candidate with exceptional proficiency in Google Sheets. This expertise should include manipulating, analyzing, and managing data within Google Sheets. The candidate should be outstanding at extracting business logic from existing reports and implementing it into new ones. Although a basic understanding of SQL for tasks related to data validation and metrics calculations is beneficial, the primary skill we are seeking is proficiency in Google Sheets. This role will involve working across various cross-functional teams, so strong communication skills are essential. The position requires a meticulous eye for detail, a commitment to delivering high-quality results, and above all, exceptional competency in Google Sheets

    Google sheet knowledge is preferred.Strong Excel experience without Google will be considered.Data Validation and formulas to extract data are a mustBasic SQL knowledge is required.Strong communications skills are requiredInterview process: 2 or 3 round. Excel (Google) skill test assessment.
  - >-
    Requirements

    We are seeking 3+ years of related experience and a bachelor's or advanced degree in STEM from an accredited institution.Active in scope DoD TS/SCI security clearance. Ability to conduct analysis and import / ingest test data sets into the ArcGIS platform. Support testing events and ensure the data is collected and brought back for ingestion. Must possess the ability to work independently with minimal oversight while maintaining focus on research objectives defined by the client.

    What We Can Offer You

    We’ve been named a Best Place to Work by the Washington Post. Our employees value the flexibility at CACI that allows them to balance quality work and their personal lives. We offer competitive benefits and learning and development opportunities. We are mission-oriented and ever vigilant in aligning our solutions with the nation’s highest priorities. For over 55 years, the principles of CACI’s unique, character-based culture have been the driving force behind our success.

    Company Overview

    CACI is an Equal Opportunity/Affirmative Action Employer. All qualified applicants will receive consideration for employment without regard to race, color, religion, sex, sexual orientation, gender identity, national origin, disability, status as a protected veteran, or any other protected characteristic.

    Pay Range: There are a host of factors that can influence final salary including, but not limited to, geographic location, Federal Government contract labor categories and contract wage rates, relevant prior work experience, specific skills and competencies, education, and certifications. Our employees value the flexibility at CACI that allows them to balance quality work and their personal lives. We offer competitive compensation, benefits and learning and development opportunities. Our broad and competitive mix of benefits options is designed to support and protect employees and their families. At CACI, you will receive comprehensive benefits such as; healthcare, wellness, financial, retirement, family support, continuing education, and time off benefits. Learn more here

    The Proposed Salary Range For This Position Is

    $74,600-$156,700
  - >-
    requirements and develop solutions that meet those needs.Stay up-to-date with emerging trends and technologies in robotics, machine learning, and UAS technology.

    Due to the nature of the work, the selected applicant must be able to work onsite.

    Qualifications We Require

    Bachelor's degree in Computer Engineering, Computer Science, Electrical Engineering, Software Engineering, Mechanical Engineering, Optical Science, Robotics, or related STEM field. A higher-level degree (MS, PhD) in rellevant field may also be considered in lieu of Bachelor's degree. Equivalent experience in lieu of degree must be directly related experience that demonstrate the knowledge, skills, and ability to perform the duties of the job. Ability to obtain and maintain a DOE Q-level security clearance.

    Qualifications We Desire

    Strong knowledge of computer vision, deep learning, and other machine learning techniques. Strong written communication skills (e.g., published research in technical journals) Desire to work on solutions to National Security problems, especially in counter-autonomy and physical security system applications. Ability to work in a fast-paced environment with multiple priorities and tight deadlines. Demonstrated ability to perform machine learning related activities such as pipeline development, model explainability, and uncertainty quantification. Strong teamwork and leadership skills. Ability to travel domestically and internationally as needed (less than 15% of the time). Experience in the following: Python, ROS, and other scripting and scientific computing languages (R, C++, Java, C#) Simulation software such as Gazebo. Simulation engines such as Unreal or Unity. 3D modeling software. Linux/Unix operating systems. FPGAs. Familiarity with embedded systems and microcontrollers. Multi-sensor data fusion and coordination. Active DOE Q-level or DOD equivalent security clearance.

    About Our Team

    The Mission of department 6534 is to counter evolving autonomous threats to key national facilities and to improve the performance of physical security systems protecting those sites. We are part of a larger group focused on Autonomy and Unmanned Systems. We address real-world problems through research, development, testing, and evaluation of components and systems to advance the science of physical security. This enables customers to mitigate threats to these facilities by improving the ability to sense, assess, track, and respond to physical incursions. Our work addresses current physical security operational challenges and evolving threats such as unmanned aircraft systems (UAS). We specialize in the testing and evaluation of Counter-UAS (C-UAS) systems, which counter the danger posed by UAS, and we are the C-UAS test agent for DOE, NNSA, and DHS.

    Posting Duration

    This posting will be open for application submissions for a minimum of seven (7) calendar days, including the ‘posting date’. Sandia reserves the right to extend the posting date at any time.

    Security Clearance

    Sandia is required by DOE to conduct a pre-employment drug test and background review that includes checks of personal references, credit, law enforcement records, and employment/education verifications. Applicants for employment need to be able to obtain and maintain a DOE Q-level security clearance, which requires U.S. citizenship. If you hold more than one citizenship (i.e., of the U.S. and another country), your ability to obtain a security clearance may be impacted.

    Applicants offered employment with Sandia are subject to a federal background investigation to meet the requirements for access to classified information or matter if the duties of the position require a DOE security clearance. Substance abuse or illegal drug use, falsification of information, criminal activity, serious misconduct or other indicators of untrustworthiness can cause a clearance to be denied or terminated by DOE, resulting in the inability to perform the duties assigned and subsequent termination of employment.

    All qualified applicants will receive consideration for employment without regard to race, color, religion, sex, sexual orientation, gender identity, national origin, age, disability, or veteran status and any other protected class under state or federal law.

    NNSA Requirements For MedPEDs

    If you have a Medical Portable Electronic Device (MedPED), such as a pacemaker, defibrillator, drug-releasing pump, hearing aids, or diagnostic equipment and other equipment for measuring, monitoring, and recording body functions such as heartbeat and brain waves, if employed by Sandia National Laboratories you may be required to comply with NNSA security requirements for MedPEDs.

    If you have a MedPED and you are selected for an on-site interview at Sandia National Laboratories, there may be additional steps necessary to ensure compliance with NNSA security requirements prior to the interview date.

    Job ID: 693235
- source_sentence: Data analysis, operations reporting, SQL expertise
  sentences:
  - >-
    experience in data engineering, software engineering, data analytics, or machine learning.Strong expertise working with one or more cloud data platforms (Snowflake, Sagemaker, Databricks, etc.)Experience managing Snowflake infrastructure with terraform.Experience building batch, near real-time, and real-time data integrations with multiple sources including event streams, APIs, relational databases, noSQL databases, graph databases, document stores, and cloud object stores.Strong ability to debug, write, and optimize SQL queries in dbt. Experience with dbt is a must.Strong programming experience in one or more modern programming languages (Python, Clojure, Scala, Java, etc.)Experience working with both structured and semi-structured data.Experience with the full software development lifecycle including requirements gathering, design, implementation, testing, deployment, and iteration.Strong understanding of CI/CD principles.Strong ability to document, diagram, and deliver detailed presentations on solutions.

    Preferred Experience:Expertise managing and integrating with cloud data streaming platforms (Kinesis Data Streams, Kafka, AWS SNS/SQS, Azure Event Hubs, StreamSets, NiFi, Databricks, etc.)Expertise in working with cloud data integration platforms (Airflow / AWS MWAA, Snowflake Snowpipe, Kinesis Data Firehose, AWS Glue / Glue schema registry, Azure Data Factory, AWS DMS, Fivetran, Databricks, Dell Boomi, etc.)Experience building data infrastructure in a cloud environment using one or more infrastructure as code tools (Terraform, AWS CloudFormation, Ansible, etc.)Production experience with one or more cloud machine learning platforms (AWS Sagemaker, Databricks ML, Dataiku, etc.)Understanding of machine learning libraries (MLlib, Scikit-learn, Numpy, Pandas, etc.)Experience managing data governance and security enablement (role-based access, authentication, network isolation, data quality, data transparency, etc.) on a cloud data warehouse, especially Snowflake.Experience building and optimizing data models with tools like dbt and Spark.Experience integrating with data visualization tools (Sisense, Tableau, PowerBI, Looker, etc.)Our data engineering and analytics stack includes Snowflake, dbt, Fivetran, Airflow, AWS, Sagemaker, and Python programming for custom data engineering. We use Sisense and Sigma for BI capability. Experience with this or similar tool would be preferred. Data team owns the provisioning and administration of all the tools we work with.

    BENEFITS:Comprehensive and affordable insurance benefitsUnlimited paid time off policy401(k) enrollment9 paid company holidaysPaid parental leave

    Employment at Splash is based on individual merit. Opportunities are open to all, without regard to race, color, religion, sex, creed, age, handicap, national origin, ancestry, military status, veteran status, medical condition, marital status, sexual orientation, affectional preference, or other irrelevant factors. Splash is
  - >-
    experiences Spectrum is known for.

    BE PART OF THE CONNECTION

    As a Data Scientist in the Credit Services department, you’ll work in a fast-paced, collaborative environment to develop data-driven solutions to Charter’s business problems. You’ll be empowered to think of new approaches, use analytical, statistical and programming skills to analyze and interpret data sets, and learn new skills while growing your career with Spectrum.

    What Our Data Scientists Enjoy Most

    Leveraging knowledge in analytical and statistical algorithms to assist stakeholders in improving their businessPartnering on the design and implementation of statistical data quality procedures for existing and new data sourcesCommunicating complex data science solutions, concepts, and analyses to team members and business leadersPresenting data insights & recommendations to key stakeholdersEstablishing links across existing data sources and finding new, interesting data correlationsEnsuring testing and validation are components of all analytics solutions

    You’ll work in a dynamic office environment. You’ll excel in this role if you are a self-starter who can work independently as well as in a team. If you’re comfortable presenting data and findings in front of team members & stakeholders and have excellent problem-solving skills, this could be the role for you.

    Required Qualifications

    WHAT YOU’LL BRING TO SPECTRUM

    Experience: Data analytics experience: 3 years, programming experience: 2 yearsEducation: Bachelor’s degree in computer science, statistics, or operations research, or equivalent combination of education and experienceTechnical skills: Python, R, comprehensive SQL skill, Spark, HiveSkills: Experience with analytics and modeling on large datasets encompassing millions of records; Experience with the full model development and implementation cycle from ideation; Research, train and test models to model implementationAbilities: Perform in-depth & independent research and analysis; Experience using a data science toolkit such as Python or R, command of statistical techniques and machine learning algorithms; Ability to work with minimum supervision; Effective communication, verbal and written, relationship management, and customer service skills with a focus on working effectively in a team environmentTravel: As required (10%)

    Preferred Qualifications

    Education: Graduate degree in statistics, mathematics, analytics or operations researchExperience: Experience in working with large consumer data to discern consumer behaviors and risk profiles, ideally in telecommunication or banking industries.

    SPECTRUM CONNECTS YOU TO MORE

    Dynamic Growth: The growth of our industry and evolving technology powers our employees’ careers as they move up or around the companyLearning Culture: We invest in your learning, and provide paid training and coaching to help you succeedSupportive Teams: Be part of a strong community that gives you opportunities to network and grow, and wants to see you succeed Total Rewards: See all the ways we invest in you—at work and in life

    Apply now, connect a friend to this opportunity or sign up for job alerts!

    BDA303 2023-25170 2023

    Here, employees don’t just have jobs, they build careers. That’s why we believe in offering a comprehensive pay and benefits package that rewards employees for their contributions to our success, supports all aspects of their well-being, and delivers real value at every stage of life.

    A qualified applicant’s criminal history, if any, will be considered in a manner consistent with applicable laws, including local ordinances.

    Get to Know Us Charter Communications is known in the United States by our Spectrum brands, including: Spectrum Internet®, TV, Mobile and Voice, Spectrum Networks, Spectrum Enterprise and Spectrum Reach. When you join us, you’re joining a strong community of more than 101,000 individuals working together to serve more than 32 million customers in 41 states and keep them connected to what matters most. Watch this video to learn more.

    Who You Are Matters Here We’re committed to growing a workforce that reflects our communities, and providing equal opportunities for employment and advancement.
  - >-
    requirements, determine technical issues, and design reports to meet data analysis needsDeveloping and maintaining web-based dashboards for real-time reporting of key performance indicators for Operations. Dashboards must be simple to use, easy to understand, and accurate.Maintenance of current managerial reports and development of new reportsDevelop and maintain reporting playbook and change logOther duties in the PUA department as assigned

    What YOU Will Bring To C&F

    Solid analytical and problem solving skillsIntuitive, data-oriented with a creative, solutions-based approachAbility to manage time, multi-task and prioritizes multiple assignments effectivelyAbility to work independently and as part of a teamAble to recognize and analyze business and data issues with minimal supervision, ability to escalate when necessaryAble to identify cause and effect relationships in data and work process flows

    Requirements

    3 years in an Analyst role is requiredA Bachelor’s degree in associated field of study; data science, computer science, mathematics, economics, statistics, etc. is requiredExperience using SQL is requiredExperience with common data science toolkits is requiredPrior experience creating operations analysis

    What C&F Will Bring To You

    Competitive compensation packageGenerous 401K employer match Employee Stock Purchase plan with employer matchingGenerous Paid Time OffExcellent benefits that go beyond health, dental & vision. Our programs are focused on your whole family’s wellness including your physical, mental and financial wellbeingA core C&F tenant is owning your career development so we provide a wealth of ways for you to keep learning, including tuition reimbursement, industry related certifications and professional training to keep you progressing on your chosen pathA dynamic, ambitious, fun and exciting work environmentWe believe you do well by doing good and want to encourage a spirit of social and community responsibility, matching donation program, volunteer opportunities, and an employee driven corporate giving program that lets you participate and support your community

    At C&F you will BELONG

    We value inclusivity and diversity. We are committed to

    Crum & Forster is committed to ensuring a workplace free from discriminatory pay disparities and complying with applicable pay equity laws. Salary ranges are available for all positions at this location, taking into account roles with a comparable level of responsibility and impact in the relevant labor market and these salary ranges are regularly reviewed and adjusted in accordance with prevailing market conditions. The annualized base pay for the advertised position, located in the specified area, ranges from a minimum of $68,000 to a maximum of $113,300. The actual compensation is determined by various factors, including but not limited to the market pay for the jobs at each level, the responsibilities and skills required for each job, and the employee’s contribution (performance) in that role. To be considered within market range, a salary is at or above the minimum of the range. You may also have the opportunity to participate in discretionary equity (stock) based compensation and/or performance-based variable pay programs.
- source_sentence: Data analysis, dashboard development, root cause analysis
  sentences:
  - >-
    skills to help establish routine reporting, conduct root cause analysis, and continuously improve data quality and processes.

    Experience in data analysis, problem-solving, or data scienceProficiency in Excel required, with experience in Tableau, SQL, or SAS preferred.Open to using various technologiesA mix of technical skills and the ability to learn supply chain domain knowledgeStrong communication and storytelling skillsEntrepreneurial mindset with flexibility to work in a dynamic environment

    Soft Skills Needed:Problem solving - Ability to creatively solve problems through data analysis.Curiosity - A curious nature and willingness to learn. Carter prioritizes this over experience.Entrepreneurial mindset - Comfort with ambiguity and willingness to work scrappy in a dynamic environment.Critical thinking - Ability to think critically about data and uncover insights.Communication - Comfort communicating findings to cross-functional teams.Adaptability - Openness to different perspectives and willingness to be influenced by new ideas.Go-getter attitude - Self-starter mentality who is comfortable wearing multiple hats.

    Qualities of Successful Candidates:Carter is seeking a problem-solver first and foremost, not a supply chain expert. He prioritizes soft skills over industry experience.We are looking for a self-starter who is eager to take ownership of this role.This is an opportunity for hands-on experience working directly with a senior leader to help transform data and processes.The ideal candidate will be a creative problem-solver who thrives in an ambiguous environment.The data environment is dynamic and ambiguous with limited resources currently. Candidates should be comfortable with uncertainty.
  - >-
    experienced data analysts/scientists.

    Qualifications

    Master's Degree and at least 3 years of relevant experience.Strong Organization and time line management skills .Experience in AI/ML modeling approaches such as: metabolic modeling, convolutional neural networks, and Gradient-weighted Class Activation Mapping.Understand all phases of the analytic process including data collection, preparation, modeling, evaluation, and deployment.

    Anticipated hiring range: $100,000 - $120,000 / annual

    To Apply

    Please visit UVA job board: https://jobs.virginia.edu and search for “R0056431”

    Complete An Application And Attach

    Cover LetterCurriculum Vitae

    Please note that multiple documents can be uploaded in the box.

    INTERNAL APPLICANTS: Please search for "find jobs" on your workday home page and apply using the internal job board.

    Review of applications will begin January 22, 2024 and continue until the position is filled.

    For questions about the position, please contact: Adam Greene, Research Program Officer ([email protected]) For questions about the application process, please contact: Rhiannon O'Coin ([email protected])

    For more information about the School of Data Science, please see www.datascience.virginia.edu

    For more information about the University of Virginia and the Charlottesville community, please see www.virginia.edu/life/charlottesville and www.embarkuva.com

    The selected candidate will be required to complete a background check at the time of the offer per University policy.

    PHYSICAL DEMANDS This is primarily a sedentary job involving extensive use of desktop computers. The job does occasionally require traveling some distance to attend meetings, and programs.

    The University of Virginia, including the UVA Health System which represents the UVA Medical Center, Schools of Medicine and Nursing, UVA Physician’s Group and the Claude Moore Health Sciences Library, are fundamentally committed to the diversity of our faculty and staff. We believe diversity is excellence expressing itself through every person's perspectives and lived experiences. We are equal opportunity and affirmative action employers. All qualified applicants will receive consideration for employment without regard to age, color, disability, gender identity or expression, marital status, national or ethnic origin, political affiliation, race, religion, sex (including pregnancy), sexual orientation, veteran status, and family medical or genetic information.
  - >-
    SKILLS AND EXPERIENCE4+ years of experience in machine learning and software engineeringMultiple years of experience deploying machine learning and statistical models into real world applicationsExperience writing production level codeGood communication skills and experience working cross functionally with non technical teamsExperience with techniques such as classification, regression, tree-based methods, or anomaly detectionHuge Plus: Experience in pricing or automotive industry!Tools: Python, Spark, Pyspark THE BENEFITSAs a Senior Machine Learning Engineer, you can expect a base salary between $150,000 to $180,000 (based on experience) plus competitive benefits. HOW TO APPLYPlease register your interest by sending your CV to Kristianna Chung via the Apply link on this page
- source_sentence: Data Visualization with Power BI, Advanced Analytics Model Deployment, Azure Analytics Services
  sentences:
  - >-
    experience, skills and abilities will determine where an employee is ultimately placed in the pay range.

    Category/Shift

    Salaried Full-Time

    Physical Location:

    6420 Poplar Avenue

    Memphis, TN

    Flexible Remote Work Schedule

    The Job You Will Perform

    Lead the hands-on IT development and deployment of data science and advanced analytics solutions for the North American Container (NAC) division of International Paper to support business strategies across approximately 200 packaging and specialty plants in the US and MexicoBreak down complex data science methodologies to business leaders in a way that is applicable to our North American Container business strategy.Identify opportunities for improving business performance and present identified opportunities to senior leadership; proactively driving the discovery of business value through data.Collaborate directly with NAC business partners to produce user stories, analyze source data capabilities, identify issues and opportunities, develop data models, and test and deploy innovative analytics solutions and systemsLead the application of data science techniques to analyze and interpret complex data sets, providing insights and enabling data-driven decision-making for North American ContainerLead analytics projects through agile or traditional project management methodologiesInfluence IT projects/initiatives with project managers, business leaders and other IT groups without direct reporting relationships.Work closely with IT Application Services team members to follow standards, best practices, and consultation for data engineeringRole includes: Data analysis, predictive and prescriptive modeling, machine learning, and algorithm development; collaborating and cross-training with analytics and visualization teams.Under general direction works on complex technical issues/problems of a large scope, impact, or importance. Independently resolves complex problems that have significant cost. Leads new technology innovations that define new “frontiers” in technical direction

    The Skills You Will Bring

    Bachelor’s degree in Computer Science, Information Technology, Statistics, or a related field is required. A Masters degree and/or PhD is preferred.Minimum 12 years of relevant work experience, less if holding a Masters or PhD.Skills with Data Visualization using tools like Microsoft Power BIDemonstrated leadership in building and deploying advanced analytics models for solving real business problems.Strong Interpersonal and Communication SkillsAdaptable to a changing work environment and dealing with ambiguity as it arises. Data Science Skills:Data analysisPredictive and Prescriptive ModelingMachine Learning (Python / R)Artificial Intelligence and Large Language ModelsAlgorithm DevelopmentExperience with Azure Analytics ServicesCompetencies:Dealing with AmbiguityFunctional / Technical Skills Problem SolvingCreativity

    The Benefits You Will Enjoy

    Paid time off including Vacation and Holidays Retirement and 401k Matching ProgramMedical & Dental Education & Development (including Tuition Reimbursement)Life & Disability Insurance

    The Career You Will Build

    Leadership trainingPromotional opportunities

    The Impact You Will Make

    We continue to build a better future for people, the plant, and our company! IP has been a good steward of sustainable practices across communities around the world for more than 120 years. Join our team and you’ll see why our team members say they’re Proud to be IP.

    The Culture You Will Experience

    International Paper promotes employee well-being by providing safe, caring and inclusive workplaces. You will learn Safety Leadership Principles and have the opportunity to opt into Employee Networking Circles such as IPVets, IPride, Women in IP, and the African American ENC. We invite you to bring your uniqueness, creativity, talents, experiences, and safety mindset to be a part of our increasingly diverse culture.

    The Company You Will Join

    International Paper (NYSE: IP) is a leading global supplier of renewable fiber-based products. We produce corrugated packaging products that protect and promote goods, and enable worldwide commerce, and pulp for diapers, tissue and other personal care products that promote health and wellness. Headquartered in Memphis, Tenn., we employ approximately 38,000 colleagues globally. We serve customers worldwide, with manufacturing operations in North America, Latin America, North Africa and Europe. Net sales for 2021 were $19.4 billion. Additional information can be found by visiting InternationalPaper.com.

    International Paper is an Equal Opportunity/Affirmative Action Employer. All qualified applicants will receive consideration for employment without regard to sex, gender identity, sexual orientation, race, color, religion, national origin, disability, protected veteran status, age, or any other characteristic protected by law.
  - >-
    skills and business acumen to drive impactful results that inform strategic decisions.Commitment to iterative development, with a proven ability to engage and update stakeholders bi-weekly or as necessary, ensuring alignment, feedback incorporation, and transparency throughout the project lifecycle.Project ownership and development from inception to completion, encompassing tasks such as gathering detailed requirements, data preparation, model creation, result generation, and data visualization. Develop insights, methods or tools using various analytic methods such as causal-model approaches, predictive modeling, regressions, machine learning, time series analysis, etc.Handle large amounts of data from multiple and disparate sources, employing advanced Python and SQL techniques to ensure efficiency and accuracyUphold the highest standards of data integrity and security, aligning with both internal and external regulatory requirements and compliance protocols

    Required Qualifications, Capabilities, And Skills

    PhD or MSc. in a scientific field (Computer Science, Engineering, Operations Research, etc.) plus 6 years or more of experience in producing advanced analytics work with an emphasis in optimizationStrong proficiency in statistical software packages and data tools, including Python and SQLStrong proficiency in Advanced Statistical methods and concepts, predictive modeling, time series forecasting, text miningStrong proficiency in Data Mining & Visualization (Tableau experienced preferred)Experience in Cloud and Big Data platforms such as AWS, Snowflake, Hadoop, Hive, Pig, Apache Spark, etc.Strong story telling capabilities including communicating complex concepts into digestible information to be consumed by audiences of varying levels in the organizationStrong commitment to iterative development, with a proven ability to engage and update stakeholders bi-weekly or as necessary, ensuring alignment, feedback incorporation, and transparency throughout the project lifecycle.

    Preferred Qualifications, Capabilities, And Skills

    Financial Service industry experience preferredExperience / Understanding of Cloud Storage (Object Stores like S3, Blob; NoSQL like Columnar, Graph databases)

    ABOUT US

    Chase is a leading financial services firm, helping nearly half of America’s households and small businesses achieve their financial goals through a broad range of financial products. Our mission is to create engaged, lifelong relationships and put our customers at the heart of everything we do. We also help small businesses, nonprofits and cities grow, delivering solutions to solve all their financial needs.

    We offer a competitive total rewards package including base salary determined based on the role, experience, skill set, and location. For those in eligible roles, discretionary incentive compensation which may be awarded in recognition of individual achievements and contributions. We also offer a range of benefits and programs to meet employee needs, based on eligibility. These benefits include comprehensive health care coverage, on-site health and wellness centers, a retirement savings plan, backup childcare, tuition reimbursement, mental health support, financial coaching and more. Additional details about total compensation and benefits will be provided during the hiring process.

    We recognize that our people are our strength and the diverse talents they bring to our global workforce are directly linked to our success. We are

    Equal Opportunity Employer/Disability/Veterans

    About The Team

    Our Consumer & Community Banking division serves our Chase customers through a range of financial services, including personal banking, credit cards, mortgages, auto financing, investment advice, small business loans and payment processing. We’re proud to lead the U.S. in credit card sales and deposit growth and have the most-used digital solutions – all while ranking first in customer satisfaction.
  - >-
    requirementsCollaborate with data engineers and data analysts to understand data needs and translate them into technical solutionsOptimize Snowflake warehouse configurations and DBT models for performance and cost efficiencyTroubleshoot and resolve data pipeline issues, ensuring smooth and efficient data flowParticipate in code reviews and provide feedback to team members to ensure code quality and adherence to best practicesStay updated with the latest developments in Snowflake and DBT technologies, and propose and implement innovative solutionsDocument data pipelines, transformations, and processes to facilitate knowledge sharing and maintain data lineageWork closely with cross-functional teams to support data-driven decision-making and business objectivesContribute to agile project planning and execution related to data engineering tasks and initiatives

    Skills8+ years of experience working on relational databases, SQL, and stored proceduresAdvanced working SQL knowledge and experience working with relational databases, query authoring (SQL) as well as working familiarity with a variety of databases such as DBT and Snowflake for Data WarehouseAt least 3+ years of experience working on Snowflake, building data warehousing solutions, dealing with slowly changing dimensions as wellHighly preferred to have prior experience in creating DW models on SAP ECC, Salesforce systemsAt least 3+ years of experience in developing and deploying data transformations using DBT, including creating/debugging macros5+ experience in supporting end-to-end data model build and maintenance, including testing/UATBuild, maintain and test data pipelines using cloud ETL/ELT tools, preferably SnapLogicPrior experience in working on SAP HANA
- source_sentence: Marketing effectiveness measurement, content performance analysis, A/B testing for social media
  sentences:
  - >-
    requirements, prioritize tasks, and deliverintegrated solutions.Documentation and Best Practices: Document design decisions, implementation details, and bestpractices for data engineering processes, ensuring knowledge sharing and continuous improvementwithin the team.Qualifications:Bachelor's or Master's degree in Computer Science, Engineering, or related field.Proven experience as a Data Engineer, preferably with specialization in handling image data.Strong proficiency in cloud computing platforms (e.g., AWS, Azure, Google Cloud) and related services(e.g., S3, EC2, Lambda, Kubernetes).Experience with data engineering tools like DataBrick, Snowflake, Glue etc.Proficiency in programming languages commonly used in data engineering (e.g., Python, Scala, Java) andfamiliarity with relevant libraries and frameworks (e.g., Apache Spark, TensorFlow, OpenCV).Solid understanding of data modeling, schema design, and database technologies (e.g., SQL, NoSQL,data warehouses).Familiarity with DevOps practices, CI/CD pipelines, and containerization technologies (e.g., Docker,Kubernetes).Strong problem-solving skills, analytical thinking, and attention to detail.Excellent communication and collaboration skills, with the ability to work effectively in a cross-functionalteam environment.
  - >-
    Hi All,

    This is Nithya from TOPSYSIT, We have a job requirement for Data Scientist with GenAI. If anyone interested please send me your updated resume along with contact details to [email protected]

    Any Visa is Fine on W2 except H1B ,OPT and CPT.If GC holders who can share PPN along with proper documentation are eligible

    Job Title Data Scientist with GenAILocation: Plano, TX-OnsiteEXP: 10 Years Description:Competencies: SQL, Natural Language Processing (NLP), Python, PySpark/ApacheSpark, Databricks.Python libraries: Numpy, Pandas, SK-Learn, Matplotlib, Tensorflow, PyTorch.Deep Learning: ANN, RNN, LSTM, CNN, Computer vision.NLP: NLTK, Word Embedding, BOW, TF-IDF, World2Vec, BERT.Framework: Flask or similar.

    Thanks & Regards,Nithya Kandee:[email protected]:678-899-6898
  - >-
    Skills:5+ years of marketing or business analytics experience with synthesizing large-scale data sets to generate insights and recommendations.5+ years of working experience using SQL, Excel, Tableau, and/or Power B. R & Python knowledge are preferred.Understanding of the data science models used for measuring marketing incrementality, e.g. multi-touch attribution, marketing mix models, causal inference, time-series regression, match market test, etc....Understanding of the full-funnel cross-platform marketing and media landscape and experience evolving analytics and measurement capabilities.Flexibility in priority shifts and fast iterations/agile working environment.Strong problem-solving skills, and ability to structure problems into an analytics plan.

    Pride Global offers eligible employee’s comprehensive healthcare coverage (medical, dental, and vision plans), supplemental coverage (accident insurance, critical illness insurance and hospital indemnity), 401(k)-retirement savings, life & disability insurance, an employee assistance program, legal support, auto, home insurance, pet insurance and employee discounts with preferred vendors.
pipeline_tag: sentence-similarity
library_name: sentence-transformers
metrics:
- cosine_accuracy
model-index:
- name: SentenceTransformer based on sentence-transformers/all-distilroberta-v1
  results:
  - task:
      type: triplet
      name: Triplet
    dataset:
      name: ai job validation
      type: ai-job-validation
    metrics:
    - type: cosine_accuracy
      value: 0.9875
      name: Cosine Accuracy
  - task:
      type: triplet
      name: Triplet
    dataset:
      name: ai job test
      type: ai-job-test
    metrics:
    - type: cosine_accuracy
      value: 0.975609756097561
      name: Cosine Accuracy
---

# SentenceTransformer based on sentence-transformers/all-distilroberta-v1

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/all-distilroberta-v1](https://huggingface.co/sentence-transformers/all-distilroberta-v1). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [sentence-transformers/all-distilroberta-v1](https://huggingface.co/sentence-transformers/all-distilroberta-v1) <!-- at revision 842eaed40bee4d61673a81c92d5689a8fed7a09f -->
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
<!-- - **Training Dataset:** Unknown -->
<!-- - **Language:** Unknown -->
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: RobertaModel
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
  (2): Normalize()
)
```
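Because the final `Normalize()` module L2-normalizes each sentence embedding, cosine similarity between two embeddings reduces to their dot product. A small, purely illustrative check with random stand-in vectors (not real model output):

```python
import torch
import torch.nn.functional as F

# Stand-ins for two sentence embeddings after the Normalize() module (unit length)
a = F.normalize(torch.randn(768), dim=-1)
b = F.normalize(torch.randn(768), dim=-1)

cosine = F.cosine_similarity(a, b, dim=-1)
dot = a @ b
print(torch.allclose(cosine, dot, atol=1e-6))  # True: for unit vectors, cosine == dot product
```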
| 619 |
+
|
| 620 |
+
## Usage
|
| 621 |
+
|
| 622 |
+
### Direct Usage (Sentence Transformers)
|
| 623 |
+
|
| 624 |
+
First install the Sentence Transformers library:
|
| 625 |
+
|
| 626 |
+
```bash
|
| 627 |
+
pip install -U sentence-transformers
|
| 628 |
+
```
|
| 629 |
+
|
| 630 |
+
Then you can load this model and run inference.
|
| 631 |
+
```python
|
| 632 |
+
from sentence_transformers import SentenceTransformer
|
| 633 |
+
|
| 634 |
+
# Download from the 🤗 Hub
|
| 635 |
+
model = SentenceTransformer("Vishnu7796/my-finetuned-model")
|
| 636 |
+
# Run inference
|
| 637 |
+
sentences = [
|
| 638 |
+
'Marketing effectiveness measurement, content performance analysis, A/B testing for social media',
|
| 639 |
+
'Skills:5+ years of marketing or business analytics experience with synthesizing large-scale data sets to generate insights and recommendations.5+ years of working experience using SQL, Excel, Tableau, and/or Power B. R & Python knowledge are preferred.Understanding of the data science models used for measuring marketing incrementality, e.g. multi-touch attribution, marketing mix models, causal inference, time-series regression, match market test, etc....Understanding of the full-funnel cross-platform marketing and media landscape and experience evolving analytics and measurement capabilities.Flexibility in priority shifts and fast iterations/agile working environment.Strong problem-solving skills, and ability to structure problems into an analytics plan.\nPride Global offers eligible employee’s comprehensive healthcare coverage (medical, dental, and vision plans), supplemental coverage (accident insurance, critical illness insurance and hospital indemnity), 401(k)-retirement savings, life & disability insurance, an employee assistance program, legal support, auto, home insurance, pet insurance and employee discounts with preferred vendors.',
'Hi All,\nThis is Nithya from TOPSYSIT, We have a job requirement for Data Scientist with GenAI. If anyone interested please send me your updated resume along with contact details to [email protected]\nAny Visa is Fine on W2 except H1B ,OPT and CPT.If GC holders who can share PPN along with proper documentation are eligible\nJob Title Data Scientist with GenAILocation: Plano, TX-OnsiteEXP: 10 Years Description:Competencies: SQL, Natural Language Processing (NLP), Python, PySpark/ApacheSpark, Databricks.Python libraries: Numpy, Pandas, SK-Learn, Matplotlib, Tensorflow, PyTorch.Deep Learning: ANN, RNN, LSTM, CNN, Computer vision.NLP: NLTK, Word Embedding, BOW, TF-IDF, World2Vec, BERT.Framework: Flask or similar.\nThanks & Regards,Nithya Kandee:[email protected]:678-899-6898',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
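
The similarity matrix can also be used directly for ranking. A minimal sketch, reusing the `sentences` and `similarities` variables from the snippet above (index 0 is the query, indices 1 and 2 are job postings):

```python
# Rank the two job postings against the query by cosine score.
query_to_docs = similarities[0, 1:]
order = query_to_docs.argsort(descending=True)
for rank, idx in enumerate(order, start=1):
    i = int(idx)
    print(f"{rank}. score={query_to_docs[i]:.4f}  {sentences[i + 1][:80]}...")
```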

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

## Evaluation

### Metrics

#### Triplet

* Datasets: `ai-job-validation` and `ai-job-test`
* Evaluated with [<code>TripletEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator)

| Metric              | ai-job-validation | ai-job-test |
|:--------------------|:------------------|:------------|
| **cosine_accuracy** | **0.9875**        | **0.9756**  |

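A minimal sketch of how this evaluation can be reproduced with `TripletEvaluator`, assuming triplets with the same `query` / `job_description_pos` / `job_description_neg` columns as the datasets below (the triplet here is only a placeholder):

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import TripletEvaluator

model = SentenceTransformer("Vishnu7796/my-finetuned-model")

# Placeholder triplet; substitute your own validation split.
eval_triplets = {
    "query": ["Data Engineer Python Azure API integration"],
    "job_description_pos": ["Strong proficiency in Python, SQL, and Azure Data Factory required."],
    "job_description_neg": ["Seeking a retail associate to manage in-store inventory and displays."],
}

evaluator = TripletEvaluator(
    anchors=eval_triplets["query"],
    positives=eval_triplets["job_description_pos"],
    negatives=eval_triplets["job_description_neg"],
    name="ai-job-validation",
)
print(evaluator(model))  # e.g. {'ai-job-validation_cosine_accuracy': ...}
```
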
<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Dataset

#### Unnamed Dataset

* Size: 647 training samples
* Columns: <code>query</code>, <code>job_description_pos</code>, and <code>job_description_neg</code>
* Approximate statistics based on the first 647 samples:
  |         | query | job_description_pos | job_description_neg |
  |:--------|:------|:--------------------|:--------------------|
  | type    | string | string | string |
  | details | <ul><li>min: 8 tokens</li><li>mean: 15.05 tokens</li><li>max: 40 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 350.34 tokens</li><li>max: 512 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 352.82 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
| query | job_description_pos | job_description_neg |
|:--------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| <code>healthcare data analytics, pregnancy identification algorithms, causal modeling techniques</code> | <code>experience in using, manipulating, and extracting insights from healthcare data with a particular focus on using machine learning with claims data. The applicant will be driven by curiosity, collaborating with a cross-functional team of Product Managers, Software Engineers, and Data Analysts.<br><br>Responsibilities<br><br>Apply data science, machine learning, and healthcare domain expertise to advance and oversee Lucina’s pregnancy identification and risk-scoring algorithms.Analyze healthcare data to study patterns of care and patient conditions which correlate to specific outcomes.Collaborate on clinical committee research and development work.Complete ad hoc analyses and reports from internal or external customers prioritized by management throughout the year.<br><br>Qualifications<br><br>Degree or practical experience in Applied Math, Statistics, Engineering, Information Management with 3 or more years of data analytics experience, Masters degree a plus.Experience manipulating and analyzing healthcare dat...</code> | <code>Experience of Delta Lake, DWH, Data Integration, Cloud, Design and Data Modelling. Proficient in developing programs in Python and SQLExperience with Data warehouse Dimensional data modeling. Working with event based/streaming technologies to ingest and process data. Working with structured, semi structured and unstructured data. Optimize Databricks jobs for performance and scalability to handle big data workloads. Monitor and troubleshoot Databricks jobs, identify and resolve issues or bottlenecks. Implement best practices for data management, security, and governance within the Databricks environment. Experience designing and developing Enterprise Data Warehouse solutions. Proficient writing SQL queries and programming including stored procedures and reverse engineering existing process. Perform code reviews to ensure fit to requirements, optimal execution patterns and adherence to established standards. <br><br>Requirements: <br><br>You are:<br><br>Minimum 9+ years of experience is required. 5+ years...</code> |
| <code>Data Engineer Python Azure API integration</code> | <code>experience preferred but not required.<br>Must-Have Skills:10+ years of total IT experience required.of 4 years of proven and relevant experience in a similar Data Engineer role and/or Python Dev role.Strong proficiency in Python programming is essential for data manipulation, pipeline development, and integration tasks.In-depth knowledge of SQL for database querying, data manipulation, and performance optimization.Experience working with RESTful APIs and integrating data from external sources using API calls.Azure: Proficiency in working with Microsoft Azure cloud platform, including services like Azure Data Factory, Azure Databricks, and Azure Storage.</code> | <code>requirements;Research & implement new data products or capabilitiesAutomate data visualization and reporting capabilities that empower users (both internal and external) to access data on their own thereby improving quality, accuracy and speedSynthesize raw data into actionable insights to drive business results, identify key trends and opportunities for business teams and report the findings in a simple, compelling wayEvaluate and approve additional data partners or data assets to be utilized for identity resolution, targeting or measurementEnhance PulsePoint's data reporting and insights generation capability by publishing internal reports about Health dataAct as the “Subject Matter Expert” to help internal teams understand the capabilities of our platforms, how to implement & troubleshoot<br>RequirementsWhat are the ‘must haves’ we’re looking for?Minimum 3-5 years of relevant experience in:Creating SQL queries from scratch using real business data;Highly proficient knowledge of Excel (...</code> |
| <code>Data Engineer big data technologies, cloud data warehousing, real-time data streaming</code> | <code>experience in machine learning, distributed microservices, and full stack systems Utilize programming languages like Java, Scala, Python and Open Source RDBMS and NoSQL databases and Cloud based data warehousing services such as Redshift and Snowflake Share your passion for staying on top of tech trends, experimenting with and learning new technologies, participating in internal & external technology communities, and mentoring other members of the engineering community Collaborate with digital product managers, and deliver robust cloud-based solutions that drive powerful experiences to help millions of Americans achieve financial empowerment Perform unit tests and conduct reviews with other team members to make sure your code is rigorously designed, elegantly coded, and effectively tuned for performance <br><br>Basic Qualifications:<br><br> Bachelor’s Degree At least 2 years of experience in application development (Internship experience does not apply) At least 1 year of experience in big d...</code> | <code>requirements of analyses and reports.Transform requirements into actionable, high-quality deliverables.Perform periodic and ad-hoc operations data analysis to measure performance and conduct root cause analysis for Claims, FRU, G&A, Provider and UM data.Compile, analyze and provide reporting that identifies and defines actionable information or recommends possible solutions for corrective actions.Partner with other Operations areas as needed to provide technical and other support in the development, delivery, maintenance, and enhancement of analytical reports and analyses.Collaborate with Operations Tower Leaders in identifying and recommending operational performance metrics; map metrics against targets and the company’s operational plans and tactical/strategic goals to ensure alignment and focus.Serve as a liaison with peers in other departments to ensure data integrity.Code and schedule reports using customer business requirements from Claims, FRU, G&A, Provider and UM data.<br><br>Princi...</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```

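A minimal sketch of this loss configuration, assuming `util.cos_sim` as the similarity function and the base model as the starting point:

```python
from sentence_transformers import SentenceTransformer, util
from sentence_transformers.losses import MultipleNegativesRankingLoss

model = SentenceTransformer("sentence-transformers/all-distilroberta-v1")
# scale=20.0 and cosine similarity, matching the parameters listed above
loss = MultipleNegativesRankingLoss(model, scale=20.0, similarity_fct=util.cos_sim)
```
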
### Evaluation Dataset

#### Unnamed Dataset

* Size: 80 evaluation samples
* Columns: <code>query</code>, <code>job_description_pos</code>, and <code>job_description_neg</code>
* Approximate statistics based on the first 80 samples:
  |         | query | job_description_pos | job_description_neg |
  |:--------|:------|:--------------------|:--------------------|
  | type    | string | string | string |
  | details | <ul><li>min: 8 tokens</li><li>mean: 14.9 tokens</li><li>max: 25 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 354.31 tokens</li><li>max: 512 tokens</li></ul> | <ul><li>min: 31 tokens</li><li>mean: 334.05 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
| query | job_description_pos | job_description_neg |
|:----------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| <code>Data analysis, operations reporting, SQL expertise</code> | <code>requirements, determine technical issues, and design reports to meet data analysis needsDeveloping and maintaining web-based dashboards for real-time reporting of key performance indicators for Operations. Dashboards must be simple to use, easy to understand, and accurate.Maintenance of current managerial reports and development of new reportsDevelop and maintain reporting playbook and change logOther duties in the PUA department as assigned<br><br>What YOU Will Bring To C&F<br><br>Solid analytical and problem solving skillsIntuitive, data-oriented with a creative, solutions-based approachAbility to manage time, multi-task and prioritizes multiple assignments effectivelyAbility to work independently and as part of a teamAble to recognize and analyze business and data issues with minimal supervision, ability to escalate when necessaryAble to identify cause and effect relationships in data and work process flows<br><br>Requirements<br><br>3 years in an Analyst role is requiredA Bachelor’s degree in associated f...</code> | <code>experience in data engineering, software engineering, data analytics, or machine learning.Strong expertise working with one or more cloud data platforms (Snowflake, Sagemaker, Databricks, etc.)Experience managing Snowflake infrastructure with terraform.Experience building batch, near real-time, and real-time data integrations with multiple sources including event streams, APIs, relational databases, noSQL databases, graph databases, document stores, and cloud object stores.Strong ability to debug, write, and optimize SQL queries in dbt. Experience with dbt is a must.Strong programming experience in one or more modern programming languages (Python, Clojure, Scala, Java, etc.)Experience working with both structured and semi-structured data.Experience with the full software development lifecycle including requirements gathering, design, implementation, testing, deployment, and iteration.Strong understanding of CI/CD principles.Strong ability to document, diagram, and deliver detailed pres...</code> |
| <code>AWS Sagemaker, ML Model Deployment, Feedback Loop Automation</code> | <code>Qualifications<br><br>AWS tools and solutions including Sagemaker, Redshift, AthenaExperience with Machine learning libraries such as PyTorchHands-on experience with designing, developing and deploying workflows with ML models with feedback loops; Uses Bitbucket workflows and has experience with CI/CDDeep experience in at least two of the following languages: PySpark/Spark, Python, CWorking knowledge of AI/ML algorithms. Large language models (LLMs), Retrieval-augmented generation (RAN), Clustering algorithms (such as K-Means), Binary classifiers (such as XGBoost)High level of self-starter, learning, and initiative behaviors Preferred:Background as a software engineer and experience as a data scientistFeatures Stores<br><br>Why Teaching Strategies<br><br>At Teaching Strategies, our solutions and services are only as strong as the teams that create them. By bringing passion, dedication, and creativity to your job every day, there's no telling what you can do and where you can go! We provide a competitive...</code> | <code>requirements and metrics.<br>Provide training and support to end-users on data quality best practices and tools.<br>Develop and maintain documentation related to data quality processes.<br><br> Education Qualification: <br><br>Bachelor's degree in a related field such as Data Science, Computer Science, or Information Systems.<br><br>Required Skills: <br><br>Experience working as a BA/Data Analyst in a Data warehouse/Data governance platform.<br>Strong analytical and problem-solving skills.<br>Proficiency in SQL, data analysis, and data visualization tools. <br>Critical thinking.<br>Ability to understand and examine complex datasets.<br>Ability to interpret Data quality results and metrics.<br><br>Desired Skills:<br><br>Knowledge of Data quality standards and processes.<br>Proven experience in a Data Quality Analyst or similar role.<br>Experience with data quality tools such as Informatica, PowerCurve, or Collibra DQ is preferred.<br>Certifications in data management or quality assurance (e.g.<br>Certified Data Management Professional, Certified Quality ...</code> |
| <code>Financial analysis, process re-engineering, client relationship management</code> | <code>skills:<br> BA/BS degree in finance-related field and/or 2+ years working in finance or related field Strong working knowledge of Microsoft Office (especially Excel) Ability to work in a fast-paced environment and attention to detail. This role includes reviews and reconciliation of financial information.<br>General Position Summary<br>The Business Analyst performs professional duties related to the review, assessment and development of business systems and processes as well as new client requirements. This includes reviewing existing processes to develop strong QA procedures as well as maximizing review efficiencies and internal controls through process re-engineering. The Business Analyst will assist with the development of seamless solutions for unique requirements of new clients, delivered and implemented on time and within scope. This role will ensure that all activity, reconciliation, reporting, and analysis is carried out in an effective, timely and accurate manner and will look for cont...</code> | <code>Skills / Experience:Required: Proficiency with Python, pyTorch, Linux, Docker, Kubernetes, Jupyter. Expertise in Deep Learning, Transformers, Natural Language Processing, Large Language Models<br>Preferred: Experience with genomics data, molecular genetics. Distributed computing tools like Ray, Dask, Spark.<br>Thanks & RegardsBharat Priyadarshan GuntiHead of Recruitment & OperationsStellite Works LLC4841 W Stonegate Circle Lake Orion MI - 48359Contact: 313 221 [email protected] ||www.stelliteworks.comNote: This is not an unsolicited mail. If you are not interested in receiving our e-mails, then please reply with subject line Remove.Oh! Lord, you are my Righteousness and My Pride</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```

### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `learning_rate`: 2e-05
- `num_train_epochs`: 1
- `warmup_ratio`: 0.1
- `batch_sampler`: no_duplicates

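A minimal training sketch that reproduces these non-default values with `SentenceTransformerTrainer`; the toy dataset stands in for the triplet columns described above and the output directory is only an example:

```python
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import MultipleNegativesRankingLoss
from sentence_transformers.training_args import BatchSamplers

model = SentenceTransformer("sentence-transformers/all-distilroberta-v1")
loss = MultipleNegativesRankingLoss(model)

# Toy stand-ins for the (query, job_description_pos, job_description_neg) triplets.
columns = {
    "query": ["Data analysis, operations reporting, SQL expertise"],
    "job_description_pos": ["Analyst role developing dashboards and reports; strong SQL required."],
    "job_description_neg": ["ML engineer role deploying PyTorch models on AWS Sagemaker."],
}
train_dataset = Dataset.from_dict(columns)
eval_dataset = Dataset.from_dict(columns)

args = SentenceTransformerTrainingArguments(
    output_dir="outputs/my-finetuned-model",   # example path
    num_train_epochs=1,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    learning_rate=2e-5,
    warmup_ratio=0.1,
    eval_strategy="steps",
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    loss=loss,
)
trainer.train()
```
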
#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 2e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 1
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.1
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`:
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: no_duplicates
- `multi_dataset_batch_sampler`: proportional

</details>

### Training Logs
| Epoch | Step | ai-job-validation_cosine_accuracy | ai-job-test_cosine_accuracy |
|:-----:|:----:|:---------------------------------:|:---------------------------:|
| 0     | 0    | 0.85                               | -                           |
| 1.0   | 41   | 0.9875                             | 0.9756                      |

### Framework Versions
- Python: 3.11.12
- Sentence Transformers: 3.3.1
- Transformers: 4.48.0
- PyTorch: 2.6.0+cu124
- Accelerate: 1.5.2
- Datasets: 2.14.4
- Tokenizers: 0.21.1

## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### MultipleNegativesRankingLoss
```bibtex
@misc{henderson2017efficient,
    title={Efficient Natural Language Response Suggestion for Smart Reply},
    author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
    year={2017},
    eprint={1705.00652},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
```

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->
config.json
ADDED
@@ -0,0 +1,27 @@
{
  "architectures": [
    "RobertaModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.51.3",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
config_sentence_transformers.json
ADDED
@@ -0,0 +1,10 @@
{
  "__version__": {
    "sentence_transformers": "3.4.1",
    "transformers": "4.51.3",
    "pytorch": "2.6.0+cu124"
  },
  "prompts": {},
  "default_prompt_name": null,
  "similarity_fn_name": "cosine"
}
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9db829be28a96b754f1fd28e89d656e4394900dd8b9690e79af549ccf6c0d9a6
size 328485128
modules.json
ADDED
@@ -0,0 +1,20 @@
[
  {
    "idx": 0,
    "name": "0",
    "path": "",
    "type": "sentence_transformers.models.Transformer"
  },
  {
    "idx": 1,
    "name": "1",
    "path": "1_Pooling",
    "type": "sentence_transformers.models.Pooling"
  },
  {
    "idx": 2,
    "name": "2",
    "path": "2_Normalize",
    "type": "sentence_transformers.models.Normalize"
  }
]
sentence_bert_config.json
ADDED
@@ -0,0 +1,4 @@
{
  "max_seq_length": 512,
  "do_lower_case": false
}
special_tokens_map.json
ADDED
@@ -0,0 +1,51 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "cls_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "<mask>",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
ADDED
@@ -0,0 +1,65 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "extra_special_tokens": {},
  "mask_token": "<mask>",
  "max_length": 128,
  "model_max_length": 512,
  "pad_to_multiple_of": null,
  "pad_token": "<pad>",
  "pad_token_type_id": 0,
  "padding_side": "right",
  "sep_token": "</s>",
  "stride": 0,
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "<unk>"
}
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff