feat: refactor key handling to replace ApiKey with GeminiKey across the codebase

main
Yoo1tic 2025-07-18 02:51:21 +08:00
parent 35b9d0b0b0
commit e6d2309d9e
5 changed files with 44 additions and 62 deletions
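The rename is mechanical at call sites; the substantive changes sit in the key type itself: the inherent as_str() accessor becomes an AsRef<str> impl, the KeyValidationError enum collapses to a &'static str error, the inner field becomes public, and the validation regex tightens from ^AIzaSy.{33}$ to ^AIzaSy[A-Za-z0-9_-]{33}$. For reference, a consolidated sketch of the type as it stands after this commit, assembled from the types hunks further down; the is_match branch lies in unchanged context the diff does not show, so its exact shape here is an assumption.

use std::{str::FromStr, sync::LazyLock};

use regex::Regex;

#[derive(Debug, Clone)]
pub struct GeminiKey {
    pub inner: String,
}

impl AsRef<str> for GeminiKey {
    fn as_ref(&self) -> &str {
        &self.inner
    }
}

impl FromStr for GeminiKey {
    type Err = &'static str;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // "AIzaSy" followed by exactly 33 URL-safe characters.
        static RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^AIzaSy[A-Za-z0-9_-]{33}$").unwrap());

        let cleaned = s.trim();
        // Assumed from unchanged context: the diff only shows the Ok/Err arms.
        if RE.is_match(cleaned) {
            Ok(Self {
                inner: cleaned.to_string(),
            })
        } else {
            Err("Invalid Google API key format")
        }
    }
}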

View File

@@ -1,15 +1,11 @@
+use crate::adapters::output::write_keys_to_file;
+use crate::types::GeminiKey;
 use anyhow::Result;
-use std::{
-    collections::HashSet,
-    fs,
-    path::Path,
-    str::FromStr,
-};
-use crate::types::ApiKey;
+use std::{collections::HashSet, fs, path::Path, str::FromStr};
 
 /// Load and validate API keys from a file
 /// Returns a vector of unique, valid API keys
-pub fn load_keys(path: &Path) -> Result<Vec<ApiKey>> {
+pub fn load_keys(path: &Path) -> Result<Vec<GeminiKey>> {
     let keys_txt = fs::read_to_string(path)?;
     // Use HashSet to automatically deduplicate keys
     let unique_keys_set: HashSet<&str> = keys_txt
@@ -22,24 +18,18 @@ pub fn load_keys(path: &Path) -> Result<Vec<ApiKey>> {
     let mut valid_keys_for_backup = Vec::new();
 
     for key_str in unique_keys_set {
-        match ApiKey::from_str(key_str) {
+        match GeminiKey::from_str(key_str) {
             Ok(api_key) => {
                 keys.push(api_key.clone());
-                valid_keys_for_backup.push(api_key.as_str().to_string());
+                valid_keys_for_backup.push(api_key.inner.clone());
            }
             Err(e) => eprintln!("Skipping invalid key: {}", e),
         }
     }
 
     // Write validated keys to backup.txt
-    let backup_content = valid_keys_for_backup.join("\n");
-    if let Err(e) = fs::write("backup.txt", backup_content) {
+    if let Err(e) = write_keys_to_file(&valid_keys_for_backup, "backup.txt") {
         eprintln!("Failed to write backup file: {}", e);
-    } else {
-        println!(
-            "Backup file created with {} valid keys",
-            valid_keys_for_backup.len()
-        );
     }
 
     Ok(keys)

View File

@@ -1,4 +1,4 @@
-use crate::types::ApiKey;
+use crate::types::GeminiKey;
 use anyhow::Result;
 use std::{fs, io::Write};
 use tokio::io::{AsyncWriteExt, BufWriter};
@@ -7,16 +7,16 @@ use toml::Value;
 
 // Write valid key to output file
 pub async fn write_keys_txt_file(
     file: &mut BufWriter<tokio::fs::File>,
-    key: &ApiKey,
+    key: &GeminiKey,
 ) -> Result<()> {
-    file.write_all(format!("{}\n", key.as_str()).as_bytes()).await?;
+    file.write_all(format!("{}\n", key.as_ref()).as_bytes()).await?;
     Ok(())
 }
 
 // Write valid key to output file in Clewdr format
-pub fn write_keys_clewdr_format(file: &mut fs::File, key: &ApiKey) -> Result<()> {
+pub fn write_keys_clewdr_format(file: &mut fs::File, key: &GeminiKey) -> Result<()> {
     let mut table = toml::value::Table::new();
-    table.insert("key".to_string(), Value::String(key.as_str().to_string()));
+    table.insert("key".to_string(), Value::String(key.as_ref().to_string()));
     let gemini_keys = Value::Array(vec![Value::Table(table)]);
     let mut root = toml::value::Table::new();
@@ -26,3 +26,11 @@ pub fn write_keys_clewdr_format(file: &mut fs::File, key: &ApiKey) -> Result<()>
     write!(file, "{}", toml_string)?;
     Ok(())
 }
+
+// Write keys to a text file with custom filename
+pub fn write_keys_to_file(keys: &[String], filename: &str) -> Result<()> {
+    let content = keys.join("\n");
+    fs::write(filename, content)?;
+    println!("File '{}' created with {} keys", filename, keys.len());
+    Ok(())
+}
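The new helper is what load_keys (first file above) now calls for its backup.txt write, instead of joining the strings and calling fs::write inline. A hypothetical caller just to show the shape; only the wrapper function name is invented here, the filename mirrors the load_keys call site.

use crate::adapters::output::write_keys_to_file;

// Hypothetical: persist an already-validated batch of key strings.
fn save_batch(valid: &[String]) -> anyhow::Result<()> {
    // Joins with '\n', writes the file, and prints a one-line summary.
    write_keys_to_file(valid, "backup.txt")?;
    Ok(())
}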

View File

@@ -5,13 +5,13 @@ use serde_json;
 use tokio::time::Duration;
 use url::Url;
 
-use crate::types::{ApiKey, KeyStatus};
+use crate::types::{GeminiKey, KeyStatus};
 
 pub async fn validate_key_with_retry(
     client: Client,
     api_host: Url,
-    key: ApiKey,
-) -> Option<ApiKey> {
+    key: GeminiKey,
+) -> Option<GeminiKey> {
     let retry_policy = ExponentialBuilder::default()
         .with_max_times(3)
         .with_min_delay(Duration::from_secs(3))
@@ -19,17 +19,17 @@ pub async fn validate_key_with_retry(
 
     let result = (async || match keytest(client.to_owned(), &api_host, &key).await {
         Ok(KeyStatus::Valid) => {
-            println!("Key: {}... -> SUCCESS", &key.as_str()[..10]);
+            println!("Key: {}... -> SUCCESS", &key.as_ref()[..10]);
             Ok(Some(key.clone()))
         }
         Ok(KeyStatus::Invalid) => {
-            println!("Key: {}... -> INVALID (Forbidden)", &key.as_str()[..10]);
+            println!("Key: {}... -> INVALID (Forbidden)", &key.as_ref()[..10]);
             Ok(None)
         }
         Ok(KeyStatus::Retryable(reason)) => {
             eprintln!(
                 "Key: {}... -> RETRYABLE (Reason: {})",
-                &key.as_str()[..10],
+                &key.as_ref()[..10],
                 reason
             );
             Err(anyhow::anyhow!("Retryable error: {}", reason))
@@ -37,7 +37,7 @@ pub async fn validate_key_with_retry(
         Err(e) => {
             eprintln!(
                 "Key: {}... -> NETWORK ERROR (Reason: {})",
-                &key.as_str()[..10],
+                &key.as_ref()[..10],
                 e
             );
             Err(e)
@@ -51,14 +51,14 @@ pub async fn validate_key_with_retry(
         Err(_) => {
             eprintln!(
                 "Key: {}... -> FAILED after all retries.",
-                &key.as_str()[..10]
+                &key.as_ref()[..10]
             );
             None
         }
     }
 }
 
-async fn keytest(client: Client, api_host: &Url, key: &ApiKey) -> Result<KeyStatus> {
+async fn keytest(client: Client, api_host: &Url, key: &GeminiKey) -> Result<KeyStatus> {
     const API_PATH: &str = "v1beta/models/gemini-2.0-flash-exp:generateContent";
 
     let full_url = api_host.join(API_PATH)?;
@@ -77,7 +77,7 @@ async fn keytest(client: Client, api_host: &Url, key: &ApiKey) -> Result<KeyStat
     let response = client
         .post(full_url)
         .header("Content-Type", "application/json")
-        .header("X-goog-api-key", key.as_str())
+        .header("X-goog-api-key", key.as_ref())
         .json(&request_body)
         .send()
         .await?;
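For orientation, the validator's new signature from a caller's side: it takes the key by value and returns it back only if it survives validation plus retries. A minimal sketch; the wrapper name is invented here, and Client is assumed to be reqwest's (its use line is outside the shown hunks, but the builder calls above match that API).

use reqwest::Client;
use url::Url;

use crate::key_validator::validate_key_with_retry;
use crate::types::GeminiKey;

// Hypothetical wrapper: true if the key passes validation after retries.
async fn is_usable(client: Client, api_host: Url, key: GeminiKey) -> bool {
    validate_key_with_retry(client, api_host, key).await.is_some()
}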

View File

@@ -10,36 +10,22 @@ pub enum KeyStatus {
 }
 
 #[derive(Debug, Clone)]
-pub struct ApiKey {
-    inner: String,
+pub struct GeminiKey {
+    pub inner: String,
 }
 
-impl ApiKey {
-    pub fn as_str(&self) -> &str {
+impl AsRef<str> for GeminiKey {
+    fn as_ref(&self) -> &str {
         &self.inner
     }
 }
 
-#[derive(Debug)]
-pub enum KeyValidationError {
-    InvalidFormat(String),
-}
-
-impl std::fmt::Display for KeyValidationError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            KeyValidationError::InvalidFormat(msg) => write!(f, "Invalid key format: {}", msg),
-        }
-    }
-}
-
-impl std::error::Error for KeyValidationError {}
-
-impl FromStr for ApiKey {
-    type Err = KeyValidationError;
+impl FromStr for GeminiKey {
+    type Err = &'static str;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
-        static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^AIzaSy.{33}$").unwrap());
+        static RE: LazyLock<Regex> =
+            LazyLock::new(|| Regex::new(r"^AIzaSy[A-Za-z0-9_-]{33}$").unwrap());
 
         let cleaned = s.trim();
@@ -48,9 +34,7 @@ impl FromStr for ApiKey {
                 inner: cleaned.to_string(),
             })
         } else {
-            Err(KeyValidationError::InvalidFormat(
-                "Google API key must start with 'AIzaSy' followed by 33 characters".to_string(),
-            ))
+            Err("Invalid Google API key format")
         }
     }
 }
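At a parse site the visible difference is the error type and the accessor: FromStr now yields a plain &'static str on failure, and the accepted string is reachable through AsRef<str> or the now-public inner field. A hypothetical in-crate demo; the function is invented for illustration and the placeholder key is fabricated to satisfy the new regex, not a real credential.

use std::str::FromStr;

use crate::types::GeminiKey;

// Hypothetical demo of the new parsing surface.
fn demo() {
    // "AIzaSy" + 33 URL-safe characters, fabricated for illustration.
    let raw = format!("AIzaSy{}", "A".repeat(33));
    match GeminiKey::from_str(&raw) {
        Ok(key) => {
            println!("key: {}", key.as_ref()); // replaces key.as_str()
            println!("owned: {}", key.inner);  // field is public now
        }
        Err(msg) => eprintln!("rejected: {}", msg), // msg: &'static str
    }
}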

View File

@@ -8,7 +8,7 @@ use tokio::{fs, io::AsyncWriteExt, sync::mpsc};
 use crate::adapters::write_keys_txt_file;
 use crate::config::KeyCheckerConfig;
 use crate::key_validator::validate_key_with_retry;
-use crate::types::ApiKey;
+use crate::types::GeminiKey;
 
 pub struct ValidationService {
     config: KeyCheckerConfig,
@@ -20,11 +20,11 @@ impl ValidationService {
         Self { config, client }
     }
 
-    pub async fn validate_keys(&self, keys: Vec<ApiKey>) -> Result<()> {
+    pub async fn validate_keys(&self, keys: Vec<GeminiKey>) -> Result<()> {
         let start_time = Instant::now();
 
         // Create channel for streaming keys from producer to consumer
-        let (tx, mut rx) = mpsc::unbounded_channel::<ApiKey>();
+        let (tx, mut rx) = mpsc::unbounded_channel::<GeminiKey>();
         let stream = stream! {
             while let Some(item) = rx.recv().await {
                 yield item;
@@ -53,7 +53,7 @@ impl ValidationService {
 
         // Process validated keys and write to output file
         while let Some(valid_key) = valid_keys_stream.next().await {
-            println!("Valid key found: {}", valid_key.as_str());
+            println!("Valid key found: {}", valid_key.as_ref());
             if let Err(e) = write_keys_txt_file(&mut buffer_writer, &valid_key).await {
                 eprintln!("Failed to write key to output file: {}", e);
             }