diff --git a/pdf.c b/pdf.c
index 39f9fb16a9b4a0a788afed4d75cf20de7270e447..eae4b047abc483461bd361038a190391e396f3c5 100644
--- a/pdf.c
+++ b/pdf.c
@@ -319,8 +319,9 @@ struct Env {
 	Catalog_T            catalog;  /* Catalog object and document structure */
 	TextState_T          tstate;   /* current text state */
 
-	unsigned int paren_nest_depth;
-	unsigned int array_nest_depth;
+	unsigned int paren_nest_depth; /* String nesting depth */
+	unsigned int array_nest_depth; /* Array nesting depth */
+	unsigned int dict_nest_depth; /* Dictionary nesting depth */
 
 };
 
@@ -1166,6 +1167,15 @@ act_rldstring(const HParseResult *p, void *u)
 	return H_MAKE_BYTES(result_bytes, bytes_required);
 }
 
+/*
+ * The following functions guard against memory exhaustion caused by nesting strings, arrays, and dictionaries too deeply.
+ * A per-type counter in aux is incremented on each opening token and decremented on each closing token; a counter already at 0 is left untouched to avoid unsigned underflow.
+ *
+ * Each counter is global across the document but respects document structure (e.g. parentheses inside streams do not count, unless for some reason they are parsed via the lparen rule).
+ * The validations make the parse fail once the maximum nesting depth is exceeded. Because no diagnostic messages are currently emitted, this can surface as an unexpected parse.
+ *
+ */
+
 #define PAREN_MAX_NEST_DEPTH 256
 
 bool
@@ -1181,7 +1191,7 @@ act_lparen(const HParseResult *p, void *u)
 	struct Env *aux = (struct Env*) u;
 	aux->paren_nest_depth += 1;
 
-	return H_MAKE_UINT(p->ast->uint);
+	return H_MAKE_UINT(H_CAST_UINT(p->ast));
 }
 
 HParsedToken*
@@ -1191,7 +1201,7 @@ act_rparen(const HParseResult *p, void *u)
 	if(aux->paren_nest_depth > 0)
 		aux->paren_nest_depth -= 1;
 
-	return H_MAKE_UINT(p->ast->uint);
+	return H_MAKE_UINT(H_CAST_UINT(p->ast));
 }
 
 #define ARRAY_MAX_NEST_DEPTH 256
@@ -1209,7 +1219,7 @@ act_array_begin(const HParseResult *p, void *u)
 	struct Env *aux = (struct Env*) u;
 	aux->array_nest_depth += 1;
 
-	return H_MAKE_UINT(p->ast->uint);
+	return H_MAKE_UINT(H_CAST_UINT(p->ast));
 }
 
 HParsedToken *
@@ -1219,9 +1229,36 @@ act_array_end(const HParseResult *p, void *u)
 	if(aux->array_nest_depth > 0)
 		aux->array_nest_depth -= 1;
 
-	return H_MAKE_UINT(p->ast->uint);
+	return H_MAKE_UINT(H_CAST_UINT(p->ast));
+}
+
+#define DICT_MAX_NEST_DEPTH 256
+
+bool
+validate_dopen(HParseResult *p, void *u)
+{
+	struct Env *aux = (struct Env*) u;
+	return aux->dict_nest_depth < DICT_MAX_NEST_DEPTH;
+}
+
+HParsedToken*
+act_dopen(const HParseResult *p, void *u)
+{
+	struct Env *aux = (struct Env*) u;
+	aux->dict_nest_depth += 1;
+
+	return H_MAKE_BYTES(H_CAST_BYTES(p->ast).token, H_CAST_BYTES(p->ast).len);
 }
 
+HParsedToken*
+act_dclose(const HParseResult *p, void *u)
+{
+	struct Env *aux = (struct Env*) u;
+	if(aux->dict_nest_depth > 0)
+		aux->dict_nest_depth -= 1;
+
+	return H_MAKE_BYTES(H_CAST_BYTES(p->ast).token, H_CAST_BYTES(p->ast).len);
+}
 
 /*
  * ********************************************************************
@@ -2515,8 +2552,8 @@ init_parser(struct Env *aux)
 	H_RULE(obj,	CHX(robj, name, dobj));
 
 	/* dictionaries */
-	H_RULE(dopen,	LIT("<<")); // XXX: limit nesting
-	H_RULE(dclose,	LIT(">>"));
+	H_AVDRULE(dopen,	LIT("<<"), aux);
+	H_ADRULE(dclose,	LIT(">>"), aux);
 	H_RULE(k_v,	CHX(CHX(SEQ(name, wel,ws, obj),
 				SEQ(name, CHX(name,dobj))),
 			    VIOL(SEQ(name, wel,ws), "Key with no value (severity=2)")));