장애사항 : Colab에서 기존 코드는 os 관련 함수가 호환이 어려워 동작이 안 되는 것으로 보입니다. 따라서 실험은 A6000x3 - Ubuntu x64 환경에서 진행

구현코드(private) : https://github.com/NAMUORI00/PIFI-ILM

<aside> 💡

ILM 구현부분 : PC 패칭 기법을 완전히 계승하지 않고 일부분만 계승

ILM 추출 실험 (bert - qwen2_0.5b 모델 기준)

Cola

{
  "task": "classification",
  "dataset": "cola",
  "slm_type": "bert",
  "llm_type": "qwen2_0.5b",
  "n_samples": 400,
  "n_pcs": 16,
  "top_pc": 5,
  "effects": [
    0.014999999999999902,
    0.020000000000000018,
    0.0,
    0.03249999999999997,
    0.020000000000000018,
    0.025000000000000022,
    0.007499999999999951,
    0.022499999999999964,
    0.01749999999999996,
    0.025000000000000022,
    0.03249999999999997,
    0.04500000000000004,
    0.010000000000000009,
    0.020000000000000018,
    0.01749999999999996,
    0.025000000000000022,
    0.03749999999999998,
    0.030000000000000027,
    0.03249999999999997,
    0.030000000000000027,
    0.0050000000000000044,
    0.007499999999999951,
    0.02749999999999997,
    0.022499999999999964
  ],
  "best_llm_layer": 11,
  "seed": 2023
}

imdb

{
  "task": "classification",
  "dataset": "imdb",
  "slm_type": "bert",
  "llm_type": "qwen2_0.5b",
  "n_samples": 400,
  "n_pcs": 16,
  "top_pc": 5,
  "effects": [
    0.10999999999999999,
    0.07500000000000007,
    0.07499999999999996,
    0.11499999999999999,
    0.08499999999999996,
    0.14,
    0.125,
    0.15499999999999992,
    0.15000000000000002,
    0.17000000000000004,
    0.21999999999999997,
    0.23250000000000004,
    0.245,
    0.25,
    0.265,
    0.27,
    0.255,
    0.2025,
    0.24250000000000005,
    0.245,
    0.24750000000000005,
    0.21250000000000002,
    0.21250000000000002,
    0.2025
  ],
  "best_llm_layer": 15,
  "seed": 2023
}

sst2

{
  "task": "classification",
  "dataset": "sst2",
  "slm_type": "bert",
  "llm_type": "qwen2_0.5b",
  "n_samples": 400,
  "n_pcs": 16,
  "top_pc": 5,
  "effects": [
    0.12250000000000005,
    0.11499999999999999,
    0.07499999999999996,
    0.135,
    0.12749999999999995,
    0.16249999999999998,
    0.1725000000000001,
    0.22250000000000003,
    0.21250000000000002,
    0.1925,
    0.20750000000000002,
    0.20999999999999996,
    0.23250000000000004,
    0.22499999999999998,
    0.22250000000000003,
    0.245,
    0.23250000000000004,
    0.22499999999999998,
    0.20999999999999996,
    0.20750000000000002,
    0.21499999999999997,
    0.1825,
    0.18999999999999995,
    0.16249999999999998
  ],
  "best_llm_layer": 15,
  "seed": 2023
}

tweet_offensive