// Arup Guha
// 5/3/2017
// Solution (maybe?) to 2017 NAIPC Problem G: Apple Market
// Passes Kattis's data, which was weak both for testing correctness and for run time.
// But, fundamentally, I am at least trying an approach that isn't obviously wrong. It's
// very hard for me to tell if I implemented my approach properly.

// Roughly speaking, my approach is to treat lots of boxes of stores as single vertices, with sizes
// drawn from (1,2,4,8,16,32) x (1,2,4,8,16,32), so 36 sizes max. For each size, there is a box that
// starts at every possible top-left corner, so lots of boxes. For example, on a 47 x 43 grid, the
// number of 32 x 16 boxes I have is (47-32+1) x (43-16+1) = 16 x 28 = 448.
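// To make the wiring concrete (illustrative coordinates, 0-indexed (row, col)): the 2x2 box whose
// top-left corner is (3,5) receives edges from the two 1x2 boxes stacked inside it, at (3,5) and
// (4,5), and also from the two 2x1 boxes sitting side by side inside it, at (3,5) and (3,6); each
// edge's capacity is the total apples in that half, looked up via the 2D prefix sums. The source
// feeds only the 1x1 boxes, and each customer draws from the biggest boxes tiling their rectangle.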

import java.util.*;
import java.io.*;

public class applemarket {

	public static int r;
	public static int c;
	public static int numCust;
	public static long[][] apples;
	public static long[][] cumSum;

	public static HashMap<Integer,Integer> pow2Map;

	public static void main(String[] args) throws Exception {

		// Because I am lazy.
		pow2Map = new HashMap<Integer,Integer>();
		for (int i=0; i<10; i++) pow2Map.put((1<<i), i);

		BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
		StringTokenizer tok = new StringTokenizer(stdin.readLine());
		r = Integer.parseInt(tok.nextToken());
		c = Integer.parseInt(tok.nextToken());
		numCust = Integer.parseInt(tok.nextToken());
		int n = r*c;
		apples = new long[r][c];
		cumSum = new long[r+1][c+1];

		// Read in who is selling what, building the 2D prefix sums (cumSum) as we go.
		for (int i=0; i<r; i++) {
			tok = new StringTokenizer(stdin.readLine());
			long rowS = 0;
			for (int j=0; j<c; j++) {
				apples[i][j] = Integer.parseInt(tok.nextToken());
				rowS += apples[i][j];
				cumSum[i+1][j+1] = cumSum[i][j+1] + rowS;
			}
		}
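		// cumSum[i][j] now holds the total apples in rows 0..i-1 and columns 0..j-1, so any
		// axis-aligned rectangle sum can be read off in O(1) by inclusion-exclusion (the
		// illustrative rectSum() helper at the bottom of the class shows the exact pattern).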

		// Read where each customer can shop (their rectangle, converted to 0-indexed) and how much money they have.
		int[][] customer = new int[numCust][4];
		long[] money = new long[numCust];
		for (int i=0; i<numCust; i++) {
			tok = new StringTokenizer(stdin.readLine());
			for (int j=0; j<4; j++)
				customer[i][j] = Integer.parseInt(tok.nextToken())-1;
			money[i] = Integer.parseInt(tok.nextToken());
		}

		ArrayList<Integer> rList = getList(r);
		ArrayList<Integer> cList = getList(c);

		// Calculate the number of vertices in our new graph, just for the stores.
		int numVStores = 0;
		int[][] start = new int[rList.size()][cList.size()];
		for (int i=0; i<rList.size(); i++) {
			for (int j=0; j<cList.size(); j++) {
				start[i][j] = numVStores;
				numVStores += (r-rList.get(i)+1)*(c-cList.get(j)+1);
			}
		}
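		// Worked example (illustrative only): on a 3 x 3 grid, rList = cList = [1, 2], and the four
		// groups contain 3*3 = 9, 3*2 = 6, 2*3 = 6, and 2*2 = 4 boxes, so start = {{0, 9}, {15, 21}}
		// and numVStores = 25.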

		Dinic g = new Dinic(numVStores+numCust);

		// Link the source only to the 1x1 store boxes; every larger box is fed by the boxes it contains.
		for (int i=0; i<r; i++)
			for (int j=0; j<c; j++)
				g.add(g.s, c*i+j, apples[i][j], 0);

		// Flow between "groups" of stores.
		for (int i=0; i<rList.size(); i++) {
			for (int j=0; j<cList.size(); j++) {

				// The 1x1 boxes were already hooked up to the source above.
				if (i == 0 && j == 0) continue;

				int rDim = rList.get(i), cDim = cList.get(j);

				// Linking stores of size x by y FROM 2 stores of size x/2 by y.
				if (i > 0) {

					int sNum = 0;
					int prevRDim = rList.get(i-1);

					// Go through every top-left corner a box of this size can have; neighboring boxes overlap in all but one row or column.
					for (int x=0; x<=r-rDim; x++) {
						for (int y=0; y<=c-cDim; y++) {

							// Capacities of the two half-boxes (top and bottom) that feed into this one, via the prefix sums.
							long cap1 = cumSum[x+prevRDim][y+cDim] - cumSum[x][y+cDim] - cumSum[x+prevRDim][y] + cumSum[x][y];
							long cap2 = cumSum[x+rDim][y+cDim] - cumSum[x+prevRDim][y+cDim] - cumSum[x+rDim][y] + cumSum[x+prevRDim][y];

							// These are the corresponding edges.
							g.add(start[i-1][j] + x*(c-cDim+1)+y,     start[i][j]+sNum, cap1, 0);
							g.add(start[i-1][j] + (x+prevRDim)*(c-cDim+1)+y, start[i][j]+sNum, cap2, 0);
							sNum++;
						}
					}

				}

				// Linking stores of size x by y FROM 2 stores of size x by y/2.
				if (j > 0) {

					int sNum = 0;
					int prevCDim = cList.get(j-1);

					// Go through every top-left corner a box of this size can have; neighboring boxes overlap in all but one row or column.
					for (int x=0; x<=r-rDim; x++) {
						for (int y=0; y<=c-cDim; y++) {

							// Capacities of the two half-boxes (left and right) that feed into this one, via the prefix sums.
							long cap1 = cumSum[x+rDim][y+prevCDim] - cumSum[x][y+prevCDim] - cumSum[x+rDim][y] + cumSum[x][y];
							long cap2 = cumSum[x+rDim][y+cDim] - cumSum[x+rDim][y+prevCDim] - cumSum[x][y+cDim] + cumSum[x][y+prevCDim];

							// These are the corresponding edges.
							g.add(start[i][j-1] + x*(c-prevCDim+1)+y,          start[i][j]+sNum, cap1, 0);
							g.add(start[i][j-1] + x*(c-prevCDim+1)+y+prevCDim, start[i][j]+sNum, cap2, 0);
							sNum++;
						}
					}

				}

			}
		} // end putting in edges between groups of stores.

		// Now, customers to sink
		for (int i=0; i<numCust; i++)
			g.add(numVStores+i, g.t, money[i], 0);

		// Finally, stores to customers: cover each customer's rectangle greedily with the largest
		// power-of-two boxes (a small worked example follows this loop).
		for (int i=0; i<numCust; i++) {

			int xSize = customer[i][1] - customer[i][0] + 1;
			int curX = customer[i][0];

			// Walk down the rows of the customer's rectangle in power-of-two chunks.
			while (curX <= customer[i][1]) {

				int boxXSize = Integer.highestOneBit(xSize);
				int ySize = customer[i][3] - customer[i][2] + 1;
				int curY = customer[i][2];

				// Walk across the columns in power-of-two chunks.
				while (curY <= customer[i][3]) {

					int boxYSize = Integer.highestOneBit(ySize);
					int vFrom = start[pow2Map.get(boxXSize)][pow2Map.get(boxYSize)] + curX*(c-boxYSize+1) + curY;
					g.add(vFrom, numVStores+i, money[i], 0);

					// Update new y location.
					ySize -= boxYSize;
					curY += boxYSize;
				}

				// Update new x location.
				xSize -= boxXSize;
				curX += boxXSize;
			}
		} // end all customer loop.
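		// Worked example (illustrative numbers): a customer whose rectangle spans 6 rows and 5 columns
		// gets row chunks of 4 and 2 and column chunks of 4 and 1, so edges come in from one 4x4,
		// one 4x1, one 2x4, and one 2x1 box, each with capacity money[i].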

		// Ta da!
		System.out.println(g.flow());
	}

	// Returns list of all powers of 2 <= n.
	public static ArrayList<Integer> getList(int n) {
		ArrayList<Integer> res = new ArrayList<Integer>();
		int i = 0;
		while ((1<<i) <= n) {
			res.add((1<<i));
			i++;
		}
		return res;
	}
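
	// Illustrative helper, not called by the solver above: this is the inclusion-exclusion pattern
	// used inline when computing cap1/cap2 for the box-to-box edges. It assumes 0-indexed, inclusive
	// corners (x1,y1)-(x2,y2) and that cumSum has already been filled in by main.
	public static long rectSum(int x1, int y1, int x2, int y2) {
		return cumSum[x2+1][y2+1] - cumSum[x1][y2+1] - cumSum[x2+1][y1] + cumSum[x1][y1];
	}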
}

class Edge {
	int v1, v2;
	long cap, flow;
	Edge rev;
	Edge(int V1, int V2, long Cap, long Flow) {
		v1 = V1;
		v2 = V2;
		cap = Cap;
		flow = Flow;
	}
}

class Dinic {

	// Queue for the top level BFS.
	public ArrayDeque<Integer> q;

	// Stores the graph.
	public ArrayList<Edge>[] adj;
	public int n;

	// s = source, t = sink
	public int s;
	public int t;


	// For BFS.
	public boolean[] blocked;
	public int[] dist;

	// "Infinite" capacity for the initial DFS call; a long, since edge capacities are longs.
	final public static long oo = Long.MAX_VALUE/2;

	// Constructor.
	public Dinic (int N) {

		// s is the source, t is the sink, add these as last two nodes.
		n = N; s = n++; t = n++;

		// Everything else is empty.
		blocked = new boolean[n];
		dist = new int[n];
		q = new ArrayDeque<Integer>();
		adj = new ArrayList[n];
		for(int i = 0; i < n; ++i)
			adj[i] = new ArrayList<Edge>();
	}

	// Just adds an edge and ALSO adds it going backwards.
	public void add(int v1, int v2, long cap, long flow) {
		Edge e = new Edge(v1, v2, cap, flow);
		Edge rev = new Edge(v2, v1, 0, 0);
		adj[v1].add(rev.rev = e);
		adj[v2].add(e.rev = rev);
	}

	// Runs the outer-level BFS (from the sink) that labels distances.
	public boolean bfs() {

		// Set up BFS
		q.clear();
		Arrays.fill(dist, -1);
		dist[t] = 0;
		q.add(t);

		// Go backwards from the sink looking for the source.
		// All we need is each node's distance to the sink.
		while(!q.isEmpty()) {
			int node = q.poll();
			if(node == s)
				return true;
			for(Edge e : adj[node]) {
				if(e.rev.cap > e.rev.flow && dist[e.v2] == -1) {
					dist[e.v2] = dist[node] + 1;
					q.add(e.v2);
				}
			}
		}

		// Augmenting paths exist iff we made it back to the source.
		return dist[s] != -1;
	}

	// Runs inner DFS in Dinic's, from node pos with a flow of min.
	public long dfs(int pos, long min) {

		// Made it to the sink, we're good, return this as our max flow for the augmenting path.
		if(pos == t)
			return min;
		long flow = 0;

		// Try each edge from here.
		for(Edge e : adj[pos]) {
			long cur = 0;

			// If our destination isn't blocked and it's 1 closer to the sink and there's flow, we
			// can go this way.
			if(!blocked[e.v2] && dist[e.v2] == dist[pos]-1 && e.cap - e.flow > 0) {

				// Recursively run dfs from here - limiting flow based on current and what's left on this edge.
				cur = dfs(e.v2, Math.min(min-flow, e.cap - e.flow));

				// Add the flow through this edge and subtract it from the reverse flow.
				e.flow += cur;
				e.rev.flow = -e.flow;

				// Add to the total flow.
				flow += cur;
			}

			// No more can go through, we're good.
			if(flow == min)
				return flow;
		}

		// Mark if this node is now blocked.
		blocked[pos] = flow != min;

		// This is the flow
		return flow;
	}

	public long flow() {
		clear();
		long ret = 0;

		// Run a top level BFS.
		while(bfs()) {

			// Reset the blocked flags for this phase.
			Arrays.fill(blocked, false);

			// One DFS call per phase; it pushes a blocking flow by exploring multiple augmenting paths.
			ret += dfs(s, oo);
		}
		return ret;
	}

	// Just resets flow through all edges to be 0.
	public void clear() {
		for(ArrayList<Edge> edges : adj)
			for(Edge e : edges)
				e.flow = 0;
	}
}