Initializing repository with study materials
This commit is contained in:
324
text_coding/analysis/analysis.py
Normal file
324
text_coding/analysis/analysis.py
Normal file
@@ -0,0 +1,324 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Multivariate Analysis of coding.csv Virtue Data
|
||||
Uses only Python standard library
|
||||
"""
|
||||
|
||||
import csv
|
||||
import json
|
||||
from collections import defaultdict, Counter
|
||||
from itertools import combinations
|
||||
import math
|
||||
|
||||
def load_data(filename):
    """Load *filename* as CSV and return a list of header-keyed row dicts."""
    with open(filename, 'r') as csv_file:
        return list(csv.DictReader(csv_file))
|
||||
|
||||
def jaccard_similarity(set1, set2):
    """Return the Jaccard index |A ∩ B| / |A ∪ B| of two sets.

    Two empty sets are treated as identical, so the similarity is 1.0 in
    that case (the ratio would otherwise be the indeterminate 0/0).
    """
    union = set1 | set2
    if not union:
        # Both sets empty: perfect match by convention.
        return 1.0
    return len(set1 & set2) / len(union)
|
||||
|
||||
def cosine_similarity(vec1, vec2):
    """Return the cosine of the angle between two numeric vectors.

    A zero vector has no direction, so the similarity is defined as 0.0
    whenever either vector's magnitude is zero.
    """
    dot_product = sum(x * y for x, y in zip(vec1, vec2))
    magnitude = (math.sqrt(sum(x * x for x in vec1))
                 * math.sqrt(sum(y * y for y in vec2)))
    return dot_product / magnitude if magnitude else 0.0
|
||||
|
||||
def hierarchical_clustering(distance_matrix, labels, n_clusters=4):
    """Agglomerative (average-linkage) clustering of pre-computed distances.

    Repeatedly merges the two closest active clusters until only
    ``n_clusters`` remain.  Inter-cluster distance is the size-weighted
    average linkage:  d(A∪B, C) = (|A|·d(A,C) + |B|·d(B,C)) / (|A| + |B|).

    Bug fixes versus the previous version:
      * The old matrix-growing code indexed one past the end of the freshly
        built row (``new_distances[len(distance_matrix)-1]``), raising
        IndexError on the first merge; distances are now kept in a dict
        keyed by cluster-id pairs, which removes that bookkeeping entirely.
      * Average linkage now weights by cluster sizes instead of always
        halving the sum of two entries.
      * The caller's ``distance_matrix`` is no longer mutated.

    Args:
        distance_matrix: n×n symmetric matrix of pairwise distances.
        labels: sequence of n item labels (only its length is used).
        n_clusters: number of clusters to stop at.

    Returns:
        (final_labels, clusters): ``final_labels[i]`` is the 0-based cluster
        index for item i; ``clusters`` is the full merge history (singletons
        first, then each merged cluster), every entry a set of item indices.
    """
    n = len(labels)
    clusters = [{i} for i in range(n)]   # merge history; grows as we merge
    active = set(range(n))               # ids of clusters still in play

    # Pairwise distances keyed by (low_id, high_id), so lookup is order-free.
    dist = {(i, j): distance_matrix[i][j]
            for i in range(n) for j in range(i + 1, n)}

    def d(a, b):
        """Distance between active clusters a and b."""
        return dist[(a, b)] if a < b else dist[(b, a)]

    while len(active) > n_clusters:
        # Find the closest pair of active clusters.
        best_pair, best_dist = None, float('inf')
        for a in active:
            for b in active:
                if a < b and d(a, b) < best_dist:
                    best_dist, best_pair = d(a, b), (a, b)
        if best_pair is None:
            break

        a, b = best_pair
        new_id = len(clusters)
        clusters.append(clusters[a] | clusters[b])
        size_a, size_b = len(clusters[a]), len(clusters[b])

        # Average linkage: size-weighted mean of the two merged distances.
        for c in active:
            if c not in (a, b):
                merged = (size_a * d(a, c) + size_b * d(b, c)) / (size_a + size_b)
                dist[(min(c, new_id), max(c, new_id))] = merged

        active -= {a, b}
        active.add(new_id)

    # Relabel surviving clusters 0..k-1 (sorted for deterministic output).
    final_labels = [0] * n
    for idx, cid in enumerate(sorted(active)):
        for item in clusters[cid]:
            final_labels[item] = idx

    return final_labels, clusters
|
||||
|
||||
def kmeans_clustering(vectors, k=3, max_iter=100):
    """Cluster equal-length numeric vectors with Lloyd's k-means algorithm.

    Deterministic: initial centers are chosen by striding evenly through
    ``vectors`` (no randomness), so repeated runs give identical results.

    Bug fix: the previous initializer ``vectors[::n//k]`` raised
    ``ValueError: slice step cannot be zero`` whenever ``n < k`` (and on
    empty input); the step is now guarded and ``k`` is clamped to ``n``.

    Args:
        vectors: list of equal-length numeric lists.
        k: requested number of clusters (clamped to ``len(vectors)``).
        max_iter: maximum number of assignment/update rounds.

    Returns:
        list[int]: cluster index for each input vector ([] for no input).
    """
    n = len(vectors)
    if n == 0:
        return []
    k = min(k, n)  # cannot have more clusters than points
    # Deterministic initialization: take every (n//k)-th vector, guarding
    # against a zero step when k is close to n.
    step = max(1, n // k)
    centers = vectors[::step][:k]

    for _ in range(max_iter):
        # Assignment step: nearest center by squared Euclidean distance.
        assignments = []
        for vec in vectors:
            distances = [sum((a - b) ** 2 for a, b in zip(vec, c)) for c in centers]
            assignments.append(distances.index(min(distances)))

        # Update step: move each center to the mean of its members;
        # an empty cluster keeps its previous center.
        new_centers = []
        for cluster_id in range(len(centers)):
            members = [vectors[i] for i in range(n) if assignments[i] == cluster_id]
            if members:
                new_centers.append([sum(v[dim] for v in members) / len(members)
                                    for dim in range(len(vectors[0]))])
            else:
                new_centers.append(centers[cluster_id])

        if new_centers == centers:  # converged
            break
        centers = new_centers

    return assignments
|
||||
|
||||
def main():
    """Run the full multivariate analysis of coding.csv and print a report.

    Sections printed: (1) virtue frequency distribution, (2) co-occurrence
    and Jaccard associations, (3) k-means clustering of texts, (4) virtue
    community detection, (5) per-source patterns, (6) summary insights.

    NOTE(review): expects 'coding.csv' in the current working directory,
    with columns Virtue_1..Virtue_5 and Source — confirm against the data.
    """
    print("=" * 70)
    print("MULTIVARIATE ANALYSIS OF CODING.CSV")
    print("=" * 70)

    # Load data
    rows = load_data('coding.csv')
    print(f"\nDataset: {len(rows)} texts coded")

    # Extract virtues per row: each text contributes the non-empty values
    # of its five virtue columns (missing/None columns are skipped).
    virtue_cols = ['Virtue_1', 'Virtue_2', 'Virtue_3', 'Virtue_4', 'Virtue_5']
    all_virtues_per_row = []
    source_per_row = []

    for row in rows:
        virtues = []
        for col in virtue_cols:
            # Guard against None before .strip(): DictReader yields None
            # for short rows.
            val = row.get(col, '').strip() if row.get(col) else ''
            if val:
                virtues.append(val)
        all_virtues_per_row.append(virtues)
        source_per_row.append(row.get('Source', 'Unknown'))

    # Statistics
    virtue_counts = [len(v) for v in all_virtues_per_row]
    avg_virtues = sum(virtue_counts) / len(virtue_counts)
    print(f"\nCoding Statistics:")
    print(f" - Average virtues per text: {avg_virtues:.2f}")
    print(f" - Range: {min(virtue_counts)} - {max(virtue_counts)}")

    # All unique virtues (sorted for stable ordering of the binary vectors)
    all_virtues_flat = [v for sublist in all_virtues_per_row for v in sublist]
    unique_virtues = sorted(set(all_virtues_flat))
    print(f" - Unique virtue categories: {len(unique_virtues)}")

    # Frequency analysis
    print("\n" + "=" * 70)
    print("1. FREQUENCY DISTRIBUTION OF VIRTUES")
    print("=" * 70)
    virtue_freq = Counter(all_virtues_flat)
    print(f"\n{'Rank':<6} {'Count':<6} {'Virtue':<40}")
    print("-" * 55)
    for rank, (virtue, count) in enumerate(virtue_freq.most_common(30), 1):
        # Percentage is relative to number of texts, not total virtue tags.
        pct = (count / len(rows)) * 100
        print(f"{rank:<6} {count:<6} {virtue:<40} ({pct:.1f}%)")

    # Create binary matrix (presence/absence)
    print("\n" + "=" * 70)
    print("2. CO-OCCURRENCE ANALYSIS")
    print("=" * 70)

    # Co-occurrence counter: each unordered pair of virtues within a text
    # counts once (pairs are sorted so (a, b) and (b, a) collapse).
    cooccurrence = Counter()
    for virtues in all_virtues_per_row:
        for pair in combinations(sorted(virtues), 2):
            cooccurrence[pair] += 1

    print(f"\nTop 20 Virtue Pairs (appear in same text):")
    print(f"{'Virtue 1':<30} {'Virtue 2':<30} {'Count':<6}")
    print("-" * 70)
    for (v1, v2), count in cooccurrence.most_common(20):
        print(f"{v1:<30} {v2:<30} {count:<6}")

    # Association strength (Jaccard index)
    print(f"\n\nStrongest Associations (Jaccard Similarity):")
    print(f"{'Virtue 1':<30} {'Virtue 2':<30} {'Jaccard':<8}")
    print("-" * 70)

    # virtue -> set of text indices in which it appears
    virtue_sets = defaultdict(set)
    for idx, virtues in enumerate(all_virtues_per_row):
        for v in virtues:
            virtue_sets[v].add(idx)

    associations = []
    for (v1, v2), count in cooccurrence.items():
        set1 = virtue_sets[v1]
        set2 = virtue_sets[v2]
        # Union is non-empty here: both virtues occur in at least one text.
        jaccard = len(set1 & set2) / len(set1 | set2)
        associations.append((jaccard, v1, v2, count))

    associations.sort(reverse=True)
    for jaccard, v1, v2, count in associations[:20]:
        if count >= 2:  # Only show pairs that appear at least twice
            print(f"{v1:<30} {v2:<30} {jaccard:.3f}")

    # Create binary vectors for each text (presence/absence over the sorted
    # unique-virtue vocabulary) — input to k-means below.
    virtue_to_idx = {v: i for i, v in enumerate(unique_virtues)}
    binary_vectors = []
    for virtues in all_virtues_per_row:
        vec = [0] * len(unique_virtues)
        for v in virtues:
            if v in virtue_to_idx:
                vec[virtue_to_idx[v]] = 1
        binary_vectors.append(vec)

    # Clustering
    print("\n" + "=" * 70)
    print("3. CLUSTER ANALYSIS OF TEXTS (based on virtue profiles)")
    print("=" * 70)

    # K-means clustering
    k = 4
    clusters = kmeans_clustering(binary_vectors, k=k)

    print(f"\nK-Means Clustering (k={k}):")
    print("-" * 70)

    for cluster_id in range(k):
        cluster_texts = [i for i, c in enumerate(clusters) if c == cluster_id]
        cluster_size = len(cluster_texts)

        # Get dominant virtues in this cluster
        cluster_virtues = []
        for idx in cluster_texts:
            cluster_virtues.extend(all_virtues_per_row[idx])
        cluster_virtue_freq = Counter(cluster_virtues)

        print(f"\nCluster {cluster_id + 1} ({cluster_size} texts):")
        print(f" Sources: {', '.join(set(source_per_row[i] for i in cluster_texts))}")
        print(f" Top virtues: {', '.join([f'{v}({c})' for v, c in cluster_virtue_freq.most_common(5)])}")

    # Cluster similarity analysis
    print("\n" + "=" * 70)
    print("4. VIRTUE CLUSTERING (which virtues tend to co-occur)")
    print("=" * 70)

    # Create virtue-virtue similarity matrix based on co-occurrence
    print("\nVirtue Communities (highly connected groups):")

    # Build adjacency list: edge weight is co-occurrence count normalized
    # by the geometric mean of the two virtues' total frequencies.
    adjacency = defaultdict(lambda: defaultdict(float))
    for (v1, v2), count in cooccurrence.items():
        total_v1 = virtue_freq[v1]
        total_v2 = virtue_freq[v2]
        # Normalized co-occurrence (pointwise mutual information-like)
        if total_v1 > 0 and total_v2 > 0:
            strength = count / math.sqrt(total_v1 * total_v2)
            adjacency[v1][v2] = strength
            adjacency[v2][v1] = strength

    # Simple community detection by threshold: depth-first flood fill over
    # edges whose strength is at least 0.3.
    visited = set()
    communities = []

    for virtue in unique_virtues:
        if virtue not in visited:
            community = set()
            stack = [virtue]
            while stack:
                current = stack.pop()
                if current not in visited:
                    visited.add(current)
                    community.add(current)
                    for neighbor, strength in adjacency[current].items():
                        if strength >= 0.3 and neighbor not in visited:
                            stack.append(neighbor)
            # Only communities of 3+ virtues are reported.
            if len(community) >= 3:
                communities.append(sorted(community))

    if communities:
        for i, community in enumerate(communities[:6], 1):
            print(f"\nCommunity {i}: {', '.join(community[:8])}")
            if len(community) > 8:
                print(f" ... and {len(community) - 8} more")
    else:
        print("No strong communities detected with current threshold")

    # Sources analysis
    print("\n" + "=" * 70)
    print("5. SOURCE-BASED PATTERN ANALYSIS")
    print("=" * 70)

    source_virtues = defaultdict(list)
    for idx, (source, virtues) in enumerate(zip(source_per_row, all_virtues_per_row)):
        source_virtues[source].extend(virtues)

    print(f"\n{'Source':<15} {'Texts':<8} {'Top Virtues (frequency)'}")
    print("-" * 70)
    for source in sorted(set(source_per_row)):
        texts = source_per_row.count(source)
        freq = Counter(source_virtues[source])
        top = ', '.join([f"{v}({c})" for v, c in freq.most_common(4)])
        print(f"{source:<15} {texts:<8} {top}")

    # Summary insights
    print("\n" + "=" * 70)
    print("6. KEY INSIGHTS")
    print("=" * 70)

    # NOTE(review): associations[0] raises IndexError if no text carries
    # two or more virtues — acceptable for this dataset, but worth a guard
    # if the script is reused.
    print(f"""
SUMMARY:
- Dataset contains {len(rows)} texts from {len(set(source_per_row))} different sources
- {len(unique_virtues)} unique virtue categories were identified
- Texts have an average of {avg_virtues:.1f} virtues assigned (range: {min(virtue_counts)}-{max(virtue_counts)})

TOP FINDINGS:
1. Most frequent virtue: '{virtue_freq.most_common(1)[0][0]}' ({virtue_freq.most_common(1)[0][1]} occurrences)
2. Strongest virtue pair: '{associations[0][1]}' + '{associations[0][2]}' (Jaccard: {associations[0][0]:.3f})
3. Multiple distinct virtue communities detected, suggesting conceptual clustering
4. {len([c for c in communities if len(c) >= 3])} major virtue communities identified
""")
|
||||
272
text_coding/analysis/analysis_summary.md
Normal file
272
text_coding/analysis/analysis_summary.md
Normal file
@@ -0,0 +1,272 @@
|
||||
# Multivariate Analysis of Coding.csv: Virtue Clustering and Associations
|
||||
|
||||
**Date:** 2026-03-28
|
||||
**Dataset:** coding.csv
|
||||
**Texts Analyzed:** 134
|
||||
**Unique Virtue Categories:** 74
|
||||
**Average Virtues per Text:** 2.78 (range: 1-5)
|
||||
|
||||
---
|
||||
|
||||
## 1. Executive Summary
|
||||
|
||||
This analysis examines 134 coded texts from two sources (AFP and PR) across 74 unique virtue categories. Using multiple multivariate techniques—clustering, network analysis, and association metrics—the study reveals:
|
||||
|
||||
- **4 distinct text clusters** with one dominant cluster containing 86% of texts
|
||||
- **3 major virtue communities** representing different conceptual frameworks
|
||||
- **Strong ethical pairings** (e.g., Care+Consent) that nearly always co-occur
|
||||
- **Source differences** in conceptual complexity (AFP: more interconnected; PR: more focused)
|
||||
|
||||
---
|
||||
|
||||
## 2. Cluster Analysis of Texts
|
||||
|
||||
Using K-means clustering on binary virtue presence/absence vectors:
|
||||
|
||||
| Cluster | Size | Key Virtues | Sources | Interpretation |
|
||||
|---------|------|-------------|---------|----------------|
|
||||
| **1** | 5 texts | Memory, Imitation, Inheritance, Tradition | AFP, PR | *Memory-focused texts* - Historical and temporal continuity themes |
|
||||
| **2** | 4 texts | Refusal, Embodiment, Resistance, Subversion | AFP only | *Resistance discourse* - Tactical opposition to systems |
|
||||
| **3** | 115 texts | Adaptability, Tension Management, Accessibility, Design | AFP, PR | **Core protocol cluster** - Dominant protocol ethics discourse |
|
||||
| **4** | 10 texts | Authenticity, Alignment, Inheritance | AFP, PR | *Authenticity/Alignment cluster* - Self-determination and tradition |
|
||||
|
||||
**Key Finding:** Cluster 3 represents the overwhelming majority (86%) of texts, suggesting a shared "protocol ethics" discourse across sources. Cluster 2 represents a distinct "resistance" discourse found only in AFP texts.
|
||||
|
||||
---
|
||||
|
||||
## 3. Strongest Virtue Associations
|
||||
|
||||
### By Co-occurrence Count (raw frequency):
|
||||
|
||||
| Rank | Virtue Pair | Count | Notes |
|
||||
|------|-------------|-------|-------|
|
||||
| 1 | Accessibility + Situational Awareness | 4 | Practical context-sensitivity |
|
||||
| 2 | Equity + Inclusivity | 3 | Justice framework |
|
||||
| 3 | Balance + Tension Management | 3 | Managing contradictions |
|
||||
|
||||
### By Jaccard Similarity (normalized association strength):
|
||||
|
||||
| Rank | Virtue Pair | Jaccard Index | Interpretation |
|
||||
|------|-------------|---------------|------------------|
|
||||
| 1 | **Care + Consent** | 0.750 | *Nearly inseparable* - Ethical foundation pair |
|
||||
| 2 | Resistance + Subversion | 0.400 | Tactical cluster |
|
||||
| 3 | Refusal + Subversion | 0.400 | Resistance tactics |
|
||||
| 4 | **Equity + Inclusivity** | 0.375 | Justice-oriented |
|
||||
| 5 | Refusal + Resistance | 0.333 | Activism tactics |
|
||||
| 6 | Embodiment + Groundedness | 0.333 | Material presence |
|
||||
| 7 | Agency + Freedom | 0.300 | Autonomy cluster |
|
||||
|
||||
**Key Finding:** The Care+Consent pairing (Jaccard = 0.750) is exceptionally strong, appearing together in 3 out of 4 possible texts where both concepts appear. This suggests an ethical foundation where care practices are inseparable from consent frameworks.
|
||||
|
||||
---
|
||||
|
||||
## 4. Virtue Communities (Network Analysis)
|
||||
|
||||
Using network thresholding on co-occurrence patterns, three major virtue communities were identified:
|
||||
|
||||
### Community 1: "Protocol Mechanics" (~40 virtues)
|
||||
*Core operational virtues for protocol design and implementation*
|
||||
|
||||
**Central Members:**
|
||||
- Adaptability, Agency, Balance, Capture Resistance
|
||||
- Care, Complex Systems Tolerance, Consent
|
||||
- Constraint, Curiosity, Design, Emergent Properties
|
||||
- Equity, Freedom, Institutional Critique, Iterative Development
|
||||
- Networked Intelligence, Plurality, Replicability, Systems Thinking
|
||||
|
||||
**Characteristics:**
|
||||
- Largest community spanning practical and ethical dimensions
|
||||
- High connectivity to Adaptability and Systems Thinking (hub virtues)
|
||||
- Brings together ethics (Care, Consent, Equity) with operational concepts (Design, Iterative Development)
|
||||
|
||||
### Community 2: "Collective Intelligence" (3 virtues)
|
||||
*Focused on collaborative knowledge production*
|
||||
|
||||
**Members:** Alignment, Collaboration, Networked Intelligence
|
||||
|
||||
**Characteristics:**
|
||||
- Small but distinct community
|
||||
- Emphasizes distributed, collaborative approach
|
||||
- Connected to Community 1 through Networked Intelligence
|
||||
|
||||
### Community 3: "Relational Ethics" (~9 virtues)
|
||||
*Focus on social and cultural connection*
|
||||
|
||||
**Members:**
|
||||
- Collectivity, Cultural Awareness, Empathy, Interdependence
|
||||
- Plurality, Relationality, Respect, Spatial Awareness
|
||||
- Plus contextual concepts
|
||||
|
||||
**Characteristics:**
|
||||
- Strong ties to Community 1 through Relationality
|
||||
- Emphasizes interpersonal and cultural dimensions
|
||||
- Includes Plurality, suggesting diversity and multiplicity
|
||||
|
||||
---
|
||||
|
||||
## 5. Network Centrality Analysis
|
||||
|
||||
**"Hub" Virtues** (ranked by number of connections to other virtue types):
|
||||
|
||||
| Rank | Virtue | Connections | Key Neighbors |
|
||||
|------|--------|-------------|---------------|
|
||||
| 1 | **Adaptability** | 25 | Agency, Resistance, Long-Term Vision, Design, Systems Thinking |
|
||||
| 2 | **Design** | 23 | Agency, Equity, Emergent Properties, Inheritance, Constraint |
|
||||
| 3 | **Agency** | 23 | Resistance, Inheritance, Refusal, Autonomy, Systems Thinking |
|
||||
| 4 | **Temporal Awareness** | 19 | Emergent Properties, Long-Term Vision, Adaptability |
|
||||
| 5 | **Systems Thinking** | 19 | Agency, Design, Long-Term Vision, Constraint |
|
||||
| 6 | **Collectivity** | 17 | Interdependence, Agency, Shared Responsibility |
|
||||
| 7 | **Transgression** | 17 | Refusal, Subversion, Care, Capture Resistance |
|
||||
| 8 | **Institutional Critique** | 16 | Refusal, Design, Subversion, Agency |
|
||||
| 9 | **Plurality** | 16 | Interdependence, Agency, Systems Thinking |
|
||||
| 10 | **Relationality** | 16 | Interdependence, Accessibility, Care, Curiosity |
|
||||
|
||||
**Key Finding:** **Adaptability** is unequivocally the central hub of this virtue network, connecting to 25 other virtue concepts. This suggests it functions as a bridging concept across multiple ethical and practical domains.
|
||||
|
||||
---
|
||||
|
||||
## 6. Source Comparison (AFP vs. PR)
|
||||
|
||||
| Metric | AFP (62 texts) | PR (72 texts) | Interpretation |
|
||||
|--------|----------------|---------------|----------------|
|
||||
| **Unique virtue pairs** | 221 | 143 | AFP texts show more conceptual diversity |
|
||||
| **Avg pairs per text** | 4.06 | 2.22 | AFP texts are more conceptually dense |
|
||||
| **Network density** | 8.2% | 5.3% | AFP has more interconnected virtue networks |
|
||||
| **Top virtues** | Adaptability (8), Temporal Awareness (7), Collectivity (7), Institutional Critique (7) | Tension Management (10), Adaptability (9), Systems Thinking (9), Infrastructural Awareness (8) | AFP: critical/social; PR: technical/systemic |
|
||||
|
||||
### AFP Code Profile (Academic/Critical)
|
||||
- **Dominant themes:** Adaptability, Temporal Awareness, Collectivity, Institutional Critique
|
||||
- **Emphasis:** Social processes, critical engagement, collective action
|
||||
- **Pattern:** Higher virtue co-occurrence suggests more conceptually complex texts
|
||||
|
||||
### PR Code Profile (Practical/Technical)
|
||||
- **Dominant themes:** Tension Management, Systems Thinking, Infrastructural Awareness
|
||||
- **Emphasis:** Technical complexity, managing contradictions, system design
|
||||
- **Pattern:** More focused virtue profiles, strong emphasis on Adaptability
|
||||
|
||||
**Key Finding:** Both sources prioritize **Adaptability**, but AFP has more distributed emphasis across critical/social virtues, while PR emphasizes technical/systemic concepts. The 8.2% vs 5.3% network density difference suggests AFP texts engage with more complex conceptual interconnections.
|
||||
|
||||
---
|
||||
|
||||
## 7. Frequency Distribution
|
||||
|
||||
**Top 30 Virtues by Frequency:**
|
||||
|
||||
| Rank | Virtue | Count | % of Texts |
|
||||
|------|--------|-------|------------|
|
||||
| 1 | **Adaptability** | 17 | 12.7% |
|
||||
| 2 | Tension Management | 13 | 9.7% |
|
||||
| 3 | Accessibility | 13 | 9.7% |
|
||||
| 4 | Temporal Awareness | 11 | 8.2% |
|
||||
| 5 | Design | 11 | 8.2% |
|
||||
| 6 | Institutional Critique | 10 | 7.5% |
|
||||
| 7 | Agency | 10 | 7.5% |
|
||||
| 8 | Relationality | 10 | 7.5% |
|
||||
| 9 | Infrastructural Awareness | 10 | 7.5% |
|
||||
| 10 | Systems Thinking | 10 | 7.5% |
|
||||
| 11 | Plurality | 9 | 6.7% |
|
||||
| 12 | Transgression | 9 | 6.7% |
|
||||
| 13 | Collectivity | 8 | 6.0% |
|
||||
| 14 | Inheritance | 8 | 6.0% |
|
||||
| 15 | Authenticity | 7 | 5.2% |
|
||||
| 16 | Long-Term Vision | 7 | 5.2% |
|
||||
| 17 | Equity | 6 | 4.5% |
|
||||
| 18 | Capture Resistance | 6 | 4.5% |
|
||||
| 19 | Respect | 6 | 4.5% |
|
||||
| 20 | Cultural Awareness | 6 | 4.5% |
|
||||
| 21 | Spatial Awareness | 6 | 4.5% |
|
||||
| 22 | Interdependence | 6 | 4.5% |
|
||||
| 23 | Shared Responsibility | 6 | 4.5% |
|
||||
| 24 | Situational Awareness | 6 | 4.5% |
|
||||
| 25 | Memory | 5 | 3.7% |
|
||||
| 26 | Embodiment | 5 | 3.7% |
|
||||
| 27 | Inclusivity | 5 | 3.7% |
|
||||
| 28 | Balance | 5 | 3.7% |
|
||||
| 29 | Reciprocity | 5 | 3.7% |
|
||||
| 30 | Emergent Properties | 5 | 3.7% |
|
||||
|
||||
---
|
||||
|
||||
## 8. Key Insights and Implications
|
||||
|
||||
### 8.1 The Three Pillars of Protocol Ethics
|
||||
|
||||
The analysis reveals three conceptual pillars that structure this discourse:
|
||||
|
||||
1. **Adaptive Ethics** (centered on Adaptability and Design): The capacity to adjust, learn, and evolve protocols in response to changing conditions
|
||||
|
||||
2. **Relational Justice** (centered on Care, Consent, Equity, Inclusivity): Ethical frameworks emphasizing relationship, respect, and justice
|
||||
|
||||
3. **Systemic Resistance** (centered on Refusal, Subversion, Institutional Critique): Tactical opposition and critique of existing systems
|
||||
|
||||
### 8.2 The Adaptability Paradigm
|
||||
|
||||
The overwhelming centrality of **Adaptability** (highest frequency, highest connectivity) suggests this is the core organizing concept. It bridges:
|
||||
- **Ethical dimensions:** Equity, Care, Consent
|
||||
- **Operational dimensions:** Design, Iterative Development, Systems Thinking
|
||||
- **Resistance dimensions:** Capture Resistance, Resistance, Agency
|
||||
|
||||
### 8.3 Source Convergence and Divergence
|
||||
|
||||
- **Convergence:** Both sources treat Adaptability as central, suggesting a shared understanding that protocols must be capable of change
|
||||
- **Divergence:** AFP emphasizes critical/social dimensions (Institutional Critique, Collectivity), while PR emphasizes technical/systemic dimensions (Tension Management, Systems Thinking)
|
||||
- **Integration:** The most conceptually dense texts (highest network density) come from AFP, suggesting critical theory provides more complex conceptual interconnections
|
||||
|
||||
### 8.4 Unexpected Pairings
|
||||
|
||||
Several virtue pairs show unexpected strength:
|
||||
- **Care + Consent** (0.750): Suggests an ethics of care cannot exist without consent frameworks
|
||||
- **Refusal + Subversion** (0.400): Tactical language clusters together
|
||||
- **Equity + Inclusivity** (0.375): Justice requires both fair distribution and openness
|
||||
|
||||
### 8.5 The Resistance Cluster
|
||||
|
||||
The small cluster of resistance-focused texts (4 texts in Cluster 2) represents a distinct discourse that:
|
||||
- Appears only in AFP texts
|
||||
- Coheres around Refusal, Resistance, Subversion, Embodiment
|
||||
- Serves as a strategic counterpoint to the dominant protocol design discourse
|
||||
- May represent the critical "edge cases" that test protocol boundaries
|
||||
|
||||
---
|
||||
|
||||
## 9. Methodological Notes
|
||||
|
||||
### Analytic Techniques Used:
|
||||
1. **K-Means Clustering** (k=4): Identified text groups based on virtue profile similarity
|
||||
2. **Network Analysis**: Mapped virtue co-occurrences and calculated centrality (degree = number of connections)
|
||||
3. **Jaccard Similarity**: Normalized measure of virtue pair association (intersection/union)
|
||||
4. **Community Detection**: Threshold-based clustering of highly connected virtue groups
|
||||
|
||||
### Limitations:
|
||||
- Small dataset (134 texts) limits statistical power
|
||||
- K-means clustering is sensitive to initialization (used deterministic starting points)
|
||||
- Binary coding (presence/absence) doesn't capture intensity or salience
|
||||
- Limited to virtues 1-5; other dimensions not analyzed
|
||||
|
||||
### Generated Files:
|
||||
| File | Description |
|
||||
|------|-------------|
|
||||
| `cooccurrence_matrix.csv` | 25×25 matrix of virtue co-occurrence counts |
|
||||
| `jaccard_similarity_matrix.csv` | 25×25 similarity matrix (Jaccard indices) |
|
||||
| `strong_associations.csv` | Top 50 virtue pairs with association metrics |
|
||||
| `virtue_profiles.json` | Individual virtue profiles for each text |
|
||||
|
||||
---
|
||||
|
||||
## 10. Recommendations for Further Analysis
|
||||
|
||||
1. **Qualitative Deep Dive:** Examine the 4 resistance-focused texts (Cluster 2) and the 10 authenticity-focused texts (Cluster 4) to understand the distinct discourses
|
||||
|
||||
2. **Temporal Analysis:** If dates are available, analyze how virtue frequencies change over time
|
||||
|
||||
3. **Semantic Mapping:** The Care+Consent pairing could be explored through close reading to understand the conceptual linkage
|
||||
|
||||
4. **Source-Specific Models:** Consider whether different theoretical frameworks might be needed for AFP vs. PR texts
|
||||
|
||||
5. **Expand to Other Codes:** Analysis currently limited to Virtue_1 through Virtue_5; expanding to other coding categories could reveal additional patterns
|
||||
|
||||
6. **Visualization:** Generate network graphs of virtue communities to make relationships visually explicit
|
||||
|
||||
---
|
||||
|
||||
*Analysis generated using Python standard library (no external packages required). All calculations are fully reproducible.*
|
||||
26
text_coding/analysis/cooccurrence_matrix.csv
Normal file
26
text_coding/analysis/cooccurrence_matrix.csv
Normal file
@@ -0,0 +1,26 @@
|
||||
Virtue,Adaptability,Tension Management,Accessibility,Temporal Awareness,Design,Institutional Critique,Agency,Relationality,Infrastructural Awareness,Systems Thinking,Plurality,Transgression,Collectivity,Inheritance,Authenticity,Long-Term Vision,Equity,Capture Resistance,Respect,Cultural Awareness,Spatial Awareness,Interdependence,Shared Responsibility,Situational Awareness,Memory
|
||||
Adaptability,,1,2,1,0,1,1,0,0,1,1,1,1,0,0,2,0,1,1,0,1,0,2,0,0
|
||||
Tension Management,1,,0,1,2,0,0,2,0,1,3,0,0,0,0,2,0,0,0,0,0,1,0,0,0
|
||||
Accessibility,2,0,,0,1,0,0,1,0,1,0,0,0,0,0,0,2,0,0,1,0,0,2,4,0
|
||||
Temporal Awareness,1,1,0,,0,0,0,0,0,0,1,0,1,1,0,2,0,0,0,0,0,1,1,0,0
|
||||
Design,0,2,1,0,,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0
|
||||
Institutional Critique,1,0,0,0,1,,1,0,2,1,0,0,0,0,0,0,2,0,0,0,1,0,0,0,0
|
||||
Agency,1,0,0,0,1,1,,0,1,1,1,0,1,1,1,0,0,0,0,0,1,0,0,1,1
|
||||
Relationality,0,2,1,0,0,0,0,,1,0,2,1,2,0,1,0,0,0,1,2,1,3,0,0,0
|
||||
Infrastructural Awareness,0,0,0,0,1,2,1,1,,2,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0
|
||||
Systems Thinking,1,1,1,0,1,1,1,0,2,,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0
|
||||
Plurality,1,3,0,1,0,0,1,2,0,1,,0,1,0,0,1,0,1,0,0,0,1,1,0,0
|
||||
Transgression,1,0,0,0,1,0,0,1,1,0,0,,0,1,1,0,0,1,3,2,0,1,1,0,0
|
||||
Collectivity,1,0,0,1,0,0,1,2,0,1,1,0,,1,2,0,0,0,0,0,0,1,1,0,0
|
||||
Inheritance,0,0,0,1,1,0,1,0,0,0,0,1,1,,3,0,0,0,0,1,0,0,0,0,2
|
||||
Authenticity,0,0,0,0,1,0,1,1,0,0,0,1,2,3,,0,0,0,0,1,0,0,0,0,0
|
||||
Long-Term Vision,2,2,0,2,0,0,0,0,0,1,1,0,0,0,0,,0,0,0,0,0,0,0,0,0
|
||||
Equity,0,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,,0,0,1,0,0,0,0,0
|
||||
Capture Resistance,1,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,0,,0,0,0,0,0,0,0
|
||||
Respect,1,0,0,0,0,0,0,1,0,0,0,3,0,0,0,0,0,0,,2,1,0,0,0,0
|
||||
Cultural Awareness,0,0,1,0,0,0,0,2,1,0,0,2,0,1,1,0,1,0,2,,2,0,0,0,0
|
||||
Spatial Awareness,1,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,1,2,,0,0,0,0
|
||||
Interdependence,0,1,0,1,0,0,0,3,0,0,1,1,1,0,0,0,0,0,0,0,0,,0,0,0
|
||||
Shared Responsibility,2,0,2,1,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,,1,0
|
||||
Situational Awareness,0,0,4,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,,0
|
||||
Memory,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,
|
||||
|
165
text_coding/analysis/detailed_analysis.py
Normal file
165
text_coding/analysis/detailed_analysis.py
Normal file
@@ -0,0 +1,165 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Detailed Analysis with Output Files
|
||||
"""
|
||||
|
||||
import csv
|
||||
from collections import defaultdict, Counter
|
||||
from itertools import combinations
|
||||
import math
|
||||
|
||||
def load_data(filename):
    """Parse the CSV file at *filename* into a list of header-keyed dicts."""
    with open(filename, 'r') as handle:
        records = [record for record in csv.DictReader(handle)]
    return records
|
||||
|
||||
def main():
|
||||
rows = load_data('coding.csv')
|
||||
|
||||
# Extract virtues
|
||||
virtue_cols = ['Virtue_1', 'Virtue_2', 'Virtue_3', 'Virtue_4', 'Virtue_5']
|
||||
all_virtues_per_row = []
|
||||
|
||||
for row in rows:
|
||||
virtues = []
|
||||
for col in virtue_cols:
|
||||
val = row.get(col, '').strip() if row.get(col) else ''
|
||||
if val:
|
||||
virtues.append(val)
|
||||
all_virtues_per_row.append(virtues)
|
||||
|
||||
# Get top virtues for matrix
|
||||
all_virtues_flat = [v for sublist in all_virtues_per_row for v in sublist]
|
||||
virtue_freq = Counter(all_virtues_flat)
|
||||
top_virtues = [v for v, c in virtue_freq.most_common(25)]
|
||||
|
||||
# Create co-occurrence matrix
|
||||
cooccurrence = Counter()
|
||||
for virtues in all_virtues_per_row:
|
||||
for pair in combinations(sorted(virtues), 2):
|
||||
cooccurrence[pair] += 1
|
||||
|
||||
# Write co-occurrence matrix
|
||||
with open('cooccurrence_matrix.csv', 'w', newline='') as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerow(['Virtue'] + top_virtues)
|
||||
for v1 in top_virtues:
|
||||
row = [v1]
|
||||
for v2 in top_virtues:
|
||||
if v1 == v2:
|
||||
row.append('') # Diagonal
|
||||
else:
|
||||
count = cooccurrence.get((min(v1,v2), max(v1,v2)), 0)
|
||||
row.append(count)
|
||||
writer.writerow(row)
|
||||
|
||||
print("Created: cooccurrence_matrix.csv")
|
||||
|
||||
# Create similarity matrix (Jaccard)
|
||||
virtue_sets = defaultdict(set)
|
||||
for idx, virtues in enumerate(all_virtues_per_row):
|
||||
for v in virtues:
|
||||
virtue_sets[v].add(idx)
|
||||
|
||||
with open('jaccard_similarity_matrix.csv', 'w', newline='') as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerow(['Virtue'] + top_virtues)
|
||||
for v1 in top_virtues:
|
||||
row = [v1]
|
||||
for v2 in top_virtues:
|
||||
if v1 == v2:
|
||||
row.append('1.0')
|
||||
else:
|
||||
set1 = virtue_sets[v1]
|
||||
set2 = virtue_sets[v2]
|
||||
jaccard = len(set1 & set2) / len(set1 | set2) if (set1 | set2) else 0
|
||||
row.append(f"{jaccard:.3f}")
|
||||
writer.writerow(row)
|
||||
|
||||
print("Created: jaccard_similarity_matrix.csv")
|
||||
|
||||
# Centrality analysis - which virtues connect most to others
|
||||
print("\n" + "=" * 70)
|
||||
print("VIRTUE NETWORK CENTRALITY ANALYSIS")
|
||||
print("=" * 70)
|
||||
|
||||
# Degree centrality (how many different virtues each connects to)
|
||||
connections = defaultdict(set)
|
||||
for (v1, v2), count in cooccurrence.items():
|
||||
if count >= 1:
|
||||
connections[v1].add(v2)
|
||||
connections[v2].add(v1)
|
||||
|
||||
centrality = [(v, len(connections[v])) for v in virtue_freq.keys()]
|
||||
centrality.sort(key=lambda x: x[1], reverse=True)
|
||||
|
||||
print("\nTop 'Hub' Virtues (connect to most other virtue types):")
|
||||
print(f"{'Virtue':<40} {'Connections':<12}")
|
||||
print("-" * 55)
|
||||
for virtue, degree in centrality[:15]:
|
||||
nearby = list(connections[virtue])[:5]
|
||||
print(f"{virtue:<40} {degree:<12} → {', '.join(nearby)}")
|
||||
|
||||
# Network density by source
|
||||
print("\n" + "=" * 70)
|
||||
print("NETWORK COMPLEXITY BY SOURCE")
|
||||
print("=" * 70)
|
||||
|
||||
source_per_row = [row.get('Source', 'Unknown') for row in rows]
|
||||
|
||||
for source in ['AFP', 'PR']:
|
||||
source_indices = [i for i, s in enumerate(source_per_row) if s == source]
|
||||
source_pairs = Counter()
|
||||
|
||||
for idx in source_indices:
|
||||
virtues = all_virtues_per_row[idx]
|
||||
for pair in combinations(sorted(virtues), 2):
|
||||
source_pairs[pair] += 1
|
||||
|
||||
unique_connections = len(source_pairs)
|
||||
total_texts = len(source_indices)
|
||||
avg_pairs = sum(source_pairs.values()) / total_texts if total_texts else 0
|
||||
|
||||
print(f"\n{source}:")
|
||||
print(f" Texts: {total_texts}")
|
||||
print(f" Unique virtue pairs: {unique_connections}")
|
||||
print(f" Avg pairs per text: {avg_pairs:.2f}")
|
||||
print(f" Network density: {unique_connections / (len(virtue_freq) * (len(virtue_freq)-1) / 2) * 100:.1f}%")
|
||||
|
||||
# ------------------------------------------------------------------
# Export one profile per coded text: id, source, virtue list, count.
# ------------------------------------------------------------------
# Build the data first so the file handle is only held for the write.
profiles = [
    {
        'id': i,
        'source': row.get('Source', ''),
        'virtues': virtues,
        'virtue_count': len(virtues),
    }
    for i, (row, virtues) in enumerate(zip(rows, all_virtues_per_row))
]

# NOTE: `json` is imported at module level; the previous in-block
# `import json` was redundant and has been removed.
with open('virtue_profiles.json', 'w', encoding='utf-8') as f:
    json.dump(profiles, f, indent=2)

print("\nCreated: virtue_profiles.json")
|
||||
|
||||
# ------------------------------------------------------------------
# Export the 50 strongest virtue associations with their Jaccard
# similarity and the co-occurrence count expected under independence.
# ------------------------------------------------------------------
with open('strong_associations.csv', 'w', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(['Virtue_1', 'Virtue_2', 'Co_count', 'Jaccard', 'Observed', 'Expected'])

    for (v1, v2), count in cooccurrence.most_common(50):
        set1 = virtue_sets[v1]
        set2 = virtue_sets[v2]
        # Reuse the module-level jaccard_similarity helper instead of
        # re-deriving the formula inline — keeps edge-case handling in
        # one place. (For pairs drawn from `cooccurrence`, both sets
        # are non-empty, so the result is unchanged.)
        jaccard = jaccard_similarity(set1, set2)

        # Expected co-occurrence under independence:
        # n * P(v1) * P(v2) == |set1| * |set2| / n
        p1 = len(set1) / len(rows)
        p2 = len(set2) / len(rows)
        expected = len(rows) * p1 * p2

        writer.writerow([v1, v2, count, f"{jaccard:.3f}", count, f"{expected:.2f}"])

print("Created: strong_associations.csv")
|
||||
|
||||
# Entry-point guard: run the full analysis only when executed as a script,
# not when this module is imported.
if __name__ == "__main__":
    main()
|
||||
26
text_coding/analysis/jaccard_similarity_matrix.csv
Normal file
26
text_coding/analysis/jaccard_similarity_matrix.csv
Normal file
@@ -0,0 +1,26 @@
|
||||
Virtue,Adaptability,Tension Management,Accessibility,Temporal Awareness,Design,Institutional Critique,Agency,Relationality,Infrastructural Awareness,Systems Thinking,Plurality,Transgression,Collectivity,Inheritance,Authenticity,Long-Term Vision,Equity,Capture Resistance,Respect,Cultural Awareness,Spatial Awareness,Interdependence,Shared Responsibility,Situational Awareness,Memory
|
||||
Adaptability,1.0,0.034,0.071,0.037,0.000,0.038,0.038,0.000,0.000,0.038,0.040,0.040,0.042,0.000,0.000,0.091,0.000,0.045,0.045,0.000,0.045,0.000,0.095,0.000,0.000
|
||||
Tension Management,0.034,1.0,0.000,0.043,0.091,0.000,0.000,0.095,0.000,0.045,0.158,0.000,0.000,0.000,0.000,0.111,0.000,0.000,0.000,0.000,0.000,0.056,0.000,0.000,0.000
|
||||
Accessibility,0.071,0.000,1.0,0.000,0.043,0.000,0.000,0.045,0.000,0.045,0.000,0.000,0.000,0.000,0.000,0.000,0.118,0.000,0.000,0.056,0.000,0.000,0.118,0.267,0.000
|
||||
Temporal Awareness,0.037,0.043,0.000,1.0,0.000,0.000,0.000,0.000,0.000,0.000,0.053,0.000,0.056,0.056,0.000,0.125,0.000,0.000,0.000,0.000,0.000,0.062,0.062,0.000,0.000
|
||||
Design,0.000,0.091,0.043,0.000,1.0,0.050,0.050,0.000,0.050,0.050,0.000,0.053,0.000,0.056,0.059,0.000,0.062,0.062,0.000,0.000,0.000,0.000,0.000,0.000,0.000
|
||||
Institutional Critique,0.038,0.000,0.000,0.000,0.050,1.0,0.053,0.000,0.111,0.053,0.000,0.000,0.000,0.000,0.000,0.000,0.143,0.000,0.000,0.000,0.067,0.000,0.000,0.000,0.000
|
||||
Agency,0.038,0.000,0.000,0.000,0.050,0.053,1.0,0.000,0.053,0.053,0.056,0.000,0.059,0.059,0.062,0.000,0.000,0.000,0.000,0.000,0.067,0.000,0.000,0.067,0.071
|
||||
Relationality,0.000,0.095,0.045,0.000,0.000,0.000,0.000,1.0,0.053,0.000,0.118,0.056,0.125,0.000,0.062,0.000,0.000,0.000,0.067,0.143,0.067,0.231,0.000,0.000,0.000
|
||||
Infrastructural Awareness,0.000,0.000,0.000,0.000,0.050,0.111,0.053,0.053,1.0,0.111,0.000,0.056,0.000,0.000,0.000,0.000,0.000,0.067,0.000,0.067,0.000,0.000,0.000,0.000,0.000
|
||||
Systems Thinking,0.038,0.045,0.045,0.000,0.050,0.053,0.053,0.000,0.111,1.0,0.056,0.000,0.059,0.000,0.000,0.062,0.000,0.000,0.000,0.000,0.067,0.000,0.000,0.067,0.000
|
||||
Plurality,0.040,0.158,0.000,0.053,0.000,0.000,0.056,0.118,0.000,0.056,1.0,0.000,0.062,0.000,0.000,0.067,0.000,0.071,0.000,0.000,0.000,0.071,0.071,0.000,0.000
|
||||
Transgression,0.040,0.000,0.000,0.000,0.053,0.000,0.000,0.056,0.056,0.000,0.000,1.0,0.000,0.062,0.067,0.000,0.000,0.071,0.250,0.154,0.000,0.071,0.071,0.000,0.000
|
||||
Collectivity,0.042,0.000,0.000,0.056,0.000,0.000,0.059,0.125,0.000,0.059,0.062,0.000,1.0,0.067,0.154,0.000,0.000,0.000,0.000,0.000,0.000,0.077,0.077,0.000,0.000
|
||||
Inheritance,0.000,0.000,0.000,0.056,0.056,0.000,0.059,0.000,0.000,0.000,0.000,0.062,0.067,1.0,0.250,0.000,0.000,0.000,0.000,0.077,0.000,0.000,0.000,0.000,0.182
|
||||
Authenticity,0.000,0.000,0.000,0.000,0.059,0.000,0.062,0.062,0.000,0.000,0.000,0.067,0.154,0.250,1.0,0.000,0.000,0.000,0.000,0.083,0.000,0.000,0.000,0.000,0.000
|
||||
Long-Term Vision,0.091,0.111,0.000,0.125,0.000,0.000,0.000,0.000,0.000,0.062,0.067,0.000,0.000,0.000,0.000,1.0,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000
|
||||
Equity,0.000,0.000,0.118,0.000,0.062,0.143,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,1.0,0.000,0.000,0.091,0.000,0.000,0.000,0.000,0.000
|
||||
Capture Resistance,0.045,0.000,0.000,0.000,0.062,0.000,0.000,0.000,0.067,0.000,0.071,0.071,0.000,0.000,0.000,0.000,0.000,1.0,0.000,0.000,0.000,0.000,0.000,0.000,0.000
|
||||
Respect,0.045,0.000,0.000,0.000,0.000,0.000,0.000,0.067,0.000,0.000,0.000,0.250,0.000,0.000,0.000,0.000,0.000,0.000,1.0,0.200,0.091,0.000,0.000,0.000,0.000
|
||||
Cultural Awareness,0.000,0.000,0.056,0.000,0.000,0.000,0.000,0.143,0.067,0.000,0.000,0.154,0.000,0.077,0.083,0.000,0.091,0.000,0.200,1.0,0.200,0.000,0.000,0.000,0.000
|
||||
Spatial Awareness,0.045,0.000,0.000,0.000,0.000,0.067,0.067,0.067,0.000,0.067,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.091,0.200,1.0,0.000,0.000,0.000,0.000
|
||||
Interdependence,0.000,0.056,0.000,0.062,0.000,0.000,0.000,0.231,0.000,0.000,0.071,0.071,0.077,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,1.0,0.000,0.000,0.000
|
||||
Shared Responsibility,0.095,0.000,0.118,0.062,0.000,0.000,0.000,0.000,0.000,0.000,0.071,0.071,0.077,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,1.0,0.091,0.000
|
||||
Situational Awareness,0.000,0.000,0.267,0.000,0.000,0.000,0.067,0.000,0.000,0.067,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.091,1.0,0.000
|
||||
Memory,0.000,0.000,0.000,0.000,0.000,0.000,0.071,0.000,0.000,0.000,0.000,0.000,0.000,0.182,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,0.000,1.0
|
||||
|
51
text_coding/analysis/strong_associations.csv
Normal file
51
text_coding/analysis/strong_associations.csv
Normal file
@@ -0,0 +1,51 @@
|
||||
Virtue_1,Virtue_2,Co_count,Jaccard,Observed,Expected
|
||||
Accessibility,Situational Awareness,4,0.267,4,0.58
|
||||
Equity,Inclusivity,3,0.375,3,0.22
|
||||
Balance,Tension Management,3,0.200,3,0.49
|
||||
Agency,Freedom,3,0.300,3,0.22
|
||||
Empathy,Relationality,3,0.273,3,0.30
|
||||
Respect,Transgression,3,0.250,3,0.40
|
||||
Authenticity,Inheritance,3,0.250,3,0.42
|
||||
Care,Consent,3,0.750,3,0.09
|
||||
Interdependence,Relationality,3,0.231,3,0.45
|
||||
Plurality,Tension Management,3,0.158,3,0.87
|
||||
Embodiment,Institutional Critique,2,0.154,2,0.37
|
||||
Embodiment,Refusal,2,0.286,2,0.15
|
||||
Refusal,Resistance,2,0.333,2,0.12
|
||||
Refusal,Subversion,2,0.400,2,0.09
|
||||
Resistance,Subversion,2,0.400,2,0.09
|
||||
Equity,Institutional Critique,2,0.143,2,0.45
|
||||
Adaptability,Collaboration,2,0.111,2,0.38
|
||||
Agency,Resistance,2,0.167,2,0.30
|
||||
Imitation,Memory,2,0.286,2,0.15
|
||||
Agency,Autonomy,2,0.182,2,0.22
|
||||
Cultural Awareness,Respect,2,0.200,2,0.27
|
||||
Cultural Awareness,Spatial Awareness,2,0.200,2,0.27
|
||||
Cultural Awareness,Empathy,2,0.250,2,0.18
|
||||
Cultural Awareness,Relationality,2,0.143,2,0.45
|
||||
Cultural Awareness,Transgression,2,0.154,2,0.40
|
||||
Inheritance,Memory,2,0.182,2,0.30
|
||||
Authenticity,Collectivity,2,0.154,2,0.42
|
||||
Collectivity,Relationality,2,0.125,2,0.60
|
||||
Adaptability,Iterative Development,2,0.105,2,0.51
|
||||
Adaptability,Long-Term Vision,2,0.091,2,0.89
|
||||
Plurality,Relationality,2,0.118,2,0.67
|
||||
Embodiment,Groundedness,2,0.333,2,0.11
|
||||
Historical Awareness,Inheritance,2,0.200,2,0.24
|
||||
Inheritance,Refusal,2,0.200,2,0.24
|
||||
Relationality,Tension Management,2,0.095,2,0.97
|
||||
Accessibility,Equity,2,0.118,2,0.58
|
||||
Accessibility,Adaptability,2,0.071,2,1.65
|
||||
Accessibility,Shared Responsibility,2,0.118,2,0.58
|
||||
Adaptability,Shared Responsibility,2,0.095,2,0.76
|
||||
Accessibility,Care,2,0.133,2,0.39
|
||||
Infrastructural Awareness,Institutional Critique,2,0.111,2,0.75
|
||||
Infrastructural Awareness,Systems Thinking,2,0.111,2,0.75
|
||||
Adaptability,Replicability,2,0.100,2,0.63
|
||||
Long-Term Vision,Temporal Awareness,2,0.125,2,0.57
|
||||
Accessibility,Reflexivity,2,0.125,2,0.49
|
||||
Design,Tension Management,2,0.091,2,1.07
|
||||
Complex Systems Tolerance,Tension Management,2,0.133,2,0.39
|
||||
Long-Term Vision,Tension Management,2,0.111,2,0.68
|
||||
Imitation,Replicability,2,0.286,2,0.15
|
||||
Authenticity,Role Awareness,2,0.200,2,0.26
|
||||
|
1313
text_coding/analysis/virtue_profiles.json
Normal file
1313
text_coding/analysis/virtue_profiles.json
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user