diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 7dcd7fa3791d5bb827304e629d8a988c77ba6dfa..720b94f6144976c0204836cc45aaabfd7825be31 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -4,6 +4,10 @@ Tue Aug 24 23:43:03 1999  Mark Mitchell  <mark@codesourcery.com>
 	NOTE_INSN_EH_REGION_END correctly.
 
 Tue Aug 24 23:26:44 1999  Michael Tiemann  <tiemann@holodeck.cygnus.com>
+			  Jeff Law <law@cygnus.com>
+
+	* loop.c (strength_reduce): Automatically unroll loops if the
+	unrolled loop size is smaller than the rolled loop size.
 
 	* loop.c (insert_bct): Replace use of sdiv_optab with asr_optab
 	and delete comment that code should be rewritten.
diff --git a/gcc/loop.c b/gcc/loop.c
index 4d18f5ee65e7f4f752830f90b094657467f2124a..931c08318efb8237776ecb301b33ccd0da07fde5 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -3705,6 +3705,7 @@ strength_reduce (scan_start, end, loop_top, insn_count,
   int n_extra_increment;
   struct loop_info loop_iteration_info;
   struct loop_info *loop_info = &loop_iteration_info;
+  int unrolled_insn_copies;
 
   /* If scan_start points to the loop exit test, we have to be wary of
      subversive use of gotos inside expression statements.  */
@@ -5133,11 +5134,40 @@ strength_reduce (scan_start, end, loop_top, insn_count,
 	INSN_CODE (p) = -1;
       }
 
+  if (loop_info->n_iterations > 0)
+    {
+      /* When we completely unroll a loop we will likely not need the increment
+	 of the loop BIV and we will not need the conditional branch at the
+	 end of the loop.  */
+      unrolled_insn_copies = insn_count - 2;
+
+#ifdef HAVE_cc0
+      /* When we completely unroll a loop on a HAVE_cc0 machine we will not
+	 need the comparison before the conditional branch at the end of the
+	 loop.  */
+      unrolled_insn_copies -= 1;
+#endif
+
+      /* We'll need one copy for each loop iteration.  */
+      unrolled_insn_copies *= loop_info->n_iterations;
+
+      /* A little slop to account for the ability to remove initialization
+	 code, better CSE, and other secondary benefits of completely
+	 unrolling some loops.  */
+      unrolled_insn_copies -= 1;
+
+      /* Clamp the value.  */
+      if (unrolled_insn_copies < 0)
+	unrolled_insn_copies = 0;
+    }
+
   /* Unroll loops from within strength reduction so that we can use the
      induction variable information that strength_reduce has already
-     collected.  */
-  
-  if (unroll_p)
+     collected.  Always unroll loops that would be as small or smaller
+     unrolled than when rolled.  */
+  if (unroll_p
+      || (loop_info->n_iterations > 0
+	  && unrolled_insn_copies <= insn_count))
     unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
 		 loop_info, 1);