bright

import torch
import clip
import numpy as np
from PIL import Image

device = "cuda" if torch.cuda.is_available() else "cpu"

# 1. Load the CLIP model
model, preprocess = clip.load("ViT-B/32", device=device)
model.eval()

# 2. Load the target image
target_image = Image.open("oiia.png").convert("RGB")
target_tensor = preprocess(target_image).unsqueeze(0).to(device)

with torch.no_grad():
    target_features = model.encode_image(target_tensor)
    target_features = target_features / target_features.norm(dim=-1, keepdim=True)

target_features_np = target_features[0].detach().cpu().numpy().astype(np.float64)
target_features_np /= np.linalg.norm(target_features_np)

# 3. Initial image (all white)
initial_image = torch.full((1, 3, 224, 224), 1.0, requires_grad=True, device=device)

# 4. Normalization with CLIP's preprocessing mean/std
def normalize_for_clip(img_tensor):
    mean = torch.tensor([0.48145466, 0.4578275, 0.40821073], device=img_tensor.device).view(1, 3, 1, 1)
    std  = torch.tensor([0.26862954, 0.26130258, 0.27577711], device=img_tensor.device).view(1, 3, 1, 1)
    return (img_tensor - mean) / std

# 5. Optimizer setup
optimizer = torch.optim.Adam([initial_image], lr=6e-3)
num_iterations = 5000
best_similarity = -1.0
best_img = None

# 6. Optimization loop
for step in range(num_iterations):
    img_clamped = torch.clamp(initial_image, 0.0, 1.0)
    normalized_img = normalize_for_clip(img_clamped)

    current_features = model.encode_image(normalized_img)
    current_features = current_features / current_features.norm(dim=-1, keepdim=True)

    # Cosine similarity in torch (keeps the gradient)
    similarity = (current_features * target_features).sum()
    loss = -similarity  # original objective, no brightness penalty

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    # Hard brightness constraint: keep the mean pixel value at 0.98 or above
    with torch.no_grad():
        if initial_image.mean() < 0.98:
            diff = 0.98 - initial_image.mean()
            initial_image += diff

    # numpy similarity, used only for evaluation/logging
    feat_np = current_features[0].detach().cpu().numpy().astype(np.float64)
    feat_np /= np.linalg.norm(feat_np)
    true_similarity = np.dot(feat_np, target_features_np)

    if true_similarity > best_similarity:
        best_similarity = true_similarity
        best_img = img_clamped.detach().clone()

    if step % 50 == 0 or step == num_iterations - 1:
        print(f"Step {step}/{num_iterations}, np_sim={true_similarity:.6f}, torch_sim={similarity.item():.6f}, mean={img_clamped.mean().item() * 255:.2f}")

    if true_similarity >= 0.99999:
        print(f"\n✅ Reached similarity {true_similarity:.6f} at step {step}")
        break

# 7. Post-processing and saving
if best_img is None:
    best_img = torch.clamp(initial_image, 0.0, 1.0).detach()

final_img_np = best_img[0].cpu().numpy() * 255
final_img_np = np.moveaxis(final_img_np, 0, -1)
mean_val = final_img_np.mean()

if mean_val < 250:
    scale_factor = 250.0 / mean_val
    final_img_np = np.clip(final_img_np * scale_factor, 0, 255)

final_img = Image.fromarray(final_img_np.astype(np.uint8))
final_img.save("adversarial_scaled.png", format="PNG", compress_level=0)

print(f"\n🎯 Final similarity: {best_similarity:.6f}")
print(f"🧪 Final mean pixel: {final_img_np.mean():.2f}")
print("✅ Saved to: adversarial_scaled.png")
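
A minimal sanity-check sketch (assuming the same ViT-B/32 model and file names as above): reload the saved PNG and re-score it, since the 8-bit quantization and the final brightness rescale can shift the similarity slightly.

import torch
import clip
import numpy as np
from PIL import Image

device = "cuda" if torch.cuda.is_available() else "cpu"
model, preprocess = clip.load("ViT-B/32", device=device)
model.eval()

def embed(path):
    # CLIP's own preprocess handles resize/crop/normalization for file input
    img = preprocess(Image.open(path).convert("RGB")).unsqueeze(0).to(device)
    with torch.no_grad():
        feat = model.encode_image(img)
        feat = feat / feat.norm(dim=-1, keepdim=True)
    return feat[0].cpu().numpy().astype(np.float64)

print("reloaded similarity:",
      float(np.dot(embed("adversarial_scaled.png"), embed("oiia.png"))))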

GPT Detector

https://gist.github.com/kangsangsoo/8ae40743dac7a4c9af4ac4f05e6ee60c

import itertools
from Crypto.Cipher import AES
from multiprocessing import Pool, cpu_count
from math import comb
import os

# Candidate bit positions (each recovered file name maps to a bit index of the key)
cand = ['000.txt', '001.txt', '002.txt', '003.txt', '004.txt', '006.txt', '008.txt', '009.txt', '010.txt', '011.txt', '013.txt', '014.txt', '018.txt', '019.txt', '022.txt', '023.txt', '025.txt', '026.txt', '028.txt', '029.txt', '030.txt', '031.txt', '033.txt', '035.txt', '037.txt', '038.txt', '040.txt', '041.txt', '043.txt', '044.txt', '045.txt', '047.txt', '051.txt', '054.txt', '055.txt', '056.txt', '057.txt', '058.txt', '060.txt', '061.txt', '063.txt', '064.txt', '065.txt', '067.txt', '075.txt', '077.txt', '079.txt', '083.txt', '084.txt', '085.txt', '089.txt', '092.txt', '097.txt', '098.txt', '100.txt', '104.txt', '106.txt', '110.txt', '112.txt', '113.txt', '114.txt', '119.txt', '120.txt', '122.txt', '123.txt', '124.txt', '125.txt', '126.txt', '127.txt']
cands = [int(x.split('.')[0]) for x in cand]

# AES-GCM parameters
nonce = bytes.fromhex('3d6b85f9299442b2219a44aee1345e16')
ciphertext = bytes.fromhex('c88f0e97fbe289c7800a68c2aae64a1825e0405cca87f6360e5f194e43978e1772f09a5bd2812cf9db8cf9008be7e34222ed9ee22bf6188358a49ada4e6d5ae16e71b0807d414f')
tag = bytes.fromhex('c58e546b2fed995d0a6a723c8f10f6d1')

# Take one combination of bit positions and attempt decryption
def try_decrypt(combo):
    key = 0
    combos = []
    for i in combo:
        combos.append(cands[i])
    for i in combos:
        assert i in cands
    assert len(set(combos)) == 64
    for j in range(128):
        key *= 2
        if j in combos:
            key = key + 1
    key_bytes = key.to_bytes(16, byteorder='big')

    cipher = AES.new(key_bytes, AES.MODE_GCM, nonce=nonce)
    try:
        plaintext = cipher.decrypt_and_verify(ciphertext, tag)
        return plaintext  # return on success
    except ValueError:
        # decrypt_and_verify raises ValueError when the GCM tag check fails
        return None

if __name__ == '__main__':
    from tqdm import tqdm

    num_workers = cpu_count()
    print(f"[*] Using {num_workers} CPU cores.")

    numbers = list(range(len(cands)))
    all_combos = itertools.combinations(numbers, 64)

    with Pool(num_workers) as pool:
        for result in tqdm(pool.imap_unordered(try_decrypt, all_combos), total=comb(len(cands), 64), desc="Brute-forcing"):
            if result:
                print("\n[✓] Decryption succeeded:", result.decode(errors='ignore'))
                pool.terminate()  # stop the remaining workers once a key is found
                break
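
For scale, choosing which 64 of the 69 candidate bits are set is the same as choosing the 5 cleared bits, so the loop tries comb(69, 5) = 11,238,513 keys in the worst case, which is easily brute-forceable. A minimal sketch of the same MSB-first key layout written as a bitmask (build_key is a hypothetical helper, not part of the solver above):

from math import comb

print(comb(69, 5))  # 11238513 candidate keys

def build_key(bit_positions):
    # Same convention as try_decrypt: index 0 is the most significant bit
    # of the 128-bit key; every listed position is set to 1.
    key = 0
    for p in bit_positions:
        key |= 1 << (127 - p)
    return key.to_bytes(16, byteorder='big')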
